history_task.py

  1. """
  2. @author: luojunhui
  3. """
  4. import json
  5. import time
  6. import asyncio
  7. from applications.config import Config
  8. from applications.log import logging
  9. from applications.functions.pqFunctions import publish_to_pq, get_pq_video_detail
  10. from applications.functions.common import shuffle_list
  11. from applications.match_algorithm.rank import get_title_oss_fission_list
  12. class historyContentIdTask(object):
  13. """
  14. 处理已经匹配过小程序的文章
  15. """
  16. TASK_PROCESSING_STATUS = 101
  17. EXIT_STATUS = 97
  18. TASK_INIT_STATUS = 0
  19. TASK_PUBLISHED_STATUS = 4
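    # Status lifecycle (as used by the methods below): tasks are picked up at
    # TASK_INIT_STATUS, locked by switching to TASK_PROCESSING_STATUS, set to
    # TASK_PUBLISHED_STATUS on success, rolled back to TASK_INIT_STATUS on
    # failure, and moved to EXIT_STATUS when a cold-start title has been
    # promoted or retired.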

    def __init__(self, mysql_client):
        """
        :param mysql_client: async MySQL client used for every query in this task
        """
        self.mysql_client = mysql_client
        self.config = Config()
        self.article_match_video_table = self.config.article_match_video_table
        self.article_text_table = self.config.article_text_table
        self.article_crawler_video_table = self.config.article_crawler_video_table
        self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
        self.history_coroutines = self.config.get_config_value("historyArticleCoroutines")

    async def get_tasks(self):
        """
        Fetch pending tasks: articles still in the initial status, processed at
        most 3 times, with at least 3 downloaded candidate videos
        :return:
        """
        select_sql1 = f"""
            SELECT
                ART.trace_id,
                ART.content_id,
                ART.flow_pool_level,
                ART.gh_id,
                ART.process_times
            FROM {self.article_match_video_table} ART
            JOIN (
                select content_id, count(1) as cnt
                from {self.article_crawler_video_table}
                where download_status = 2
                group by content_id
            ) VID on ART.content_id = VID.content_id and VID.cnt >= 3
            WHERE ART.content_status = 0 and ART.process_times <= 3
            ORDER BY request_timestamp
            LIMIT {self.history_coroutines};
        """
        tasks = await self.mysql_client.async_select(sql=select_sql1)
        task_obj_list = [
            {
                "trace_id": item[0],
                "content_id": item[1],
                "flow_pool_level": item[2],
                "gh_id": item[3],
                "process_times": item[4]
            } for item in tasks
        ]
        logging(
            code="9001",
            info="本次任务获取到 {} 条视频".format(len(task_obj_list)),
            data=task_obj_list
        )
        return task_obj_list

    async def get_video_list(self, content_id) -> list[dict]:
        """
        Fetch downloaded videos for a content_id, ordered by score (descending)
        :param content_id:
        :return: the video list if at least 3 are available, otherwise an empty list
        """
        sql = f"""
            SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
            FROM {self.article_crawler_video_table}
            WHERE content_id = '{content_id}' and download_status = 2
            ORDER BY score DESC;
        """
        res_tuple = await self.mysql_client.async_select(sql)
        if len(res_tuple) >= 3:
            return [
                {
                    "platform": i[0],
                    "play_count": i[1],
                    "like_count": i[2],
                    "video_oss_path": i[3],
                    "cover_oss_path": i[4],
                    "uid": i[5]
                }
                for i in res_tuple
            ]
        else:
            return []

    async def get_kimi_title(self, content_id):
        """
        Fetch the Kimi-generated title for a content_id
        :param content_id:
        :return: the title, or False if none exists
        """
        select_sql = f"""
            select kimi_title from {self.article_text_table} where content_id = '{content_id}';
        """
        res_tuple = await self.mysql_client.async_select(select_sql)
        if res_tuple:
            return res_tuple[0][0]
        else:
            return False

    async def update_content_status(self, new_content_status, trace_id, ori_content_status):
        """
        Switch content_status from ori_content_status to new_content_status;
        the WHERE clause on the old status acts as an optimistic lock
        :param new_content_status:
        :param trace_id:
        :param ori_content_status:
        :return: number of affected rows
        """
        update_sql = f"""
            UPDATE {self.article_match_video_table}
            SET content_status = %s, content_status_update_time = %s
            WHERE trace_id = %s and content_status = %s;
        """
        row_counts = await self.mysql_client.async_insert(
            sql=update_sql,
            params=(
                new_content_status,
                int(time.time()),
                trace_id,
                ori_content_status
            )
        )
        return row_counts

    async def publish_videos_to_pq(self, trace_id, flow_pool_level, kimi_title, gh_id, download_videos, process_times, content_id):
        """
        Publish videos to PQ
        :param content_id:
        :param process_times:
        :param trace_id:
        :param download_videos: downloaded videos ---> list [{}, {}, {}.... ]
        :param gh_id: official account id ---> str
        :param kimi_title: Kimi title ---> str
        :param flow_pool_level: flow pool level ---> str
        :return:
        """
        match flow_pool_level:
            case "autoArticlePoolLevel4":
                # cold-start pool: handled for all accounts
                video_list = shuffle_list(download_videos)[:3]
            case "autoArticlePoolLevel3":
                # second-slot content: shuffle only for the configured accounts
                if self.gh_id_dict.get(gh_id):
                    video_list = shuffle_list(download_videos)[:3]
                else:
                    video_list = download_videos[:3]
            case "autoArticlePoolLevel2":
                video_list = []
            case "autoArticlePoolLevel1":
                # headline content: use the fission re-ranked result
                fission_resort_list = await get_title_oss_fission_list(
                    db_client=self.mysql_client,
                    config=self.config,
                    content_id=content_id
                )
                if fission_resort_list:
                    total_video_list = fission_resort_list + download_videos
                    logging(
                        code=1106,
                        info="查找裂变信息成功",
                        trace_id=trace_id,
                        data={
                            "ori_list": download_videos[:3],
                            "fission_list": fission_resort_list
                        }
                    )
                    video_list = total_video_list[:3]
                else:
                    # no fission info found, keep the original order
                    logging(
                        code=1107,
                        info="查找裂变信息失败",
                        trace_id=trace_id,
                        data={
                            "ori_list": download_videos[:3]
                        }
                    )
                    video_list = download_videos[:3]
            case _:
                print("未传流量池信息")
                video_list = download_videos[:3]
        L = []
        for video_obj in video_list:
            params = {
                "videoPath": video_obj['video_oss_path'],
                "uid": video_obj['uid'],
                "title": kimi_title
            }
            publish_response = await publish_to_pq(params)
            video_id = publish_response['data']['id']
            response = await get_pq_video_detail(video_id)
            # time.sleep(2)
            obj = {
                "uid": video_obj['uid'],
                "source": video_obj['platform'],
                "kimiTitle": kimi_title,
                "videoId": response['data'][0]['id'],
                "videoCover": response['data'][0]['shareImgPath'],
                "videoPath": response['data'][0]['videoPath'],
                "videoOss": video_obj['video_oss_path']
            }
            L.append(obj)
        update_sql = f"""
            UPDATE {self.article_match_video_table}
            SET content_status = %s, response = %s, process_times = %s
            WHERE trace_id = %s and content_status = %s;
        """
        await self.mysql_client.async_insert(
            sql=update_sql,
            params=(
                self.TASK_PUBLISHED_STATUS,
                json.dumps(L, ensure_ascii=False),
                process_times + 1,
                trace_id,
                self.TASK_PROCESSING_STATUS
            )
        )
        logging(
            code="9002",
            info="已经从历史文章更新",
            trace_id=trace_id
        )

    async def roll_back_content_status_when_fails(self, process_times, trace_id):
        """
        On failure, roll the task back to the initial status and increment process_times
        :param process_times:
        :param trace_id:
        :return:
        """
        update_article_sql = f"""
            UPDATE {self.article_match_video_table}
            SET
                content_status = %s,
                content_status_update_time = %s,
                process_times = %s
            WHERE trace_id = %s and content_status = %s;
        """
        await self.mysql_client.async_insert(
            sql=update_article_sql,
            params=(
                self.TASK_INIT_STATUS,
                int(time.time()),
                process_times + 1,
                trace_id,
                self.TASK_PROCESSING_STATUS
            )
        )

    async def check_title_whether_exit(self, content_id):
        """
        Check whether the article's title has been promoted or retired
        :return:
        """
        UP_LEVEL_STATUS = 1
        TITLE_EXIT_STATUS = -1
        sql = f"""
            SELECT lat.article_title, cstp.status
            FROM long_articles_text lat
            JOIN cold_start_title_pool cstp ON lat.article_title = cstp.title
            WHERE lat.content_id = '{content_id}';
        """
        result = await self.mysql_client.async_select(sql)
        if result:
            status = result[0][1]
            if status in {UP_LEVEL_STATUS, TITLE_EXIT_STATUS}:
                return True
            else:
                return False
        else:
            return False

    async def process_task(self, params):
        """
        Process a single task asynchronously
        :param params:
        :return:
        """
        content_id = params['content_id']
        trace_id = params['trace_id']
        flow_pool_level = params['flow_pool_level']
        if flow_pool_level == "autoArticlePoolLevel4":
            exit_status = await self.check_title_whether_exit(content_id)
            if exit_status:
                # mark the task as exited
                affected_rows = await self.update_content_status(
                    trace_id=trace_id,
                    new_content_status=self.EXIT_STATUS,
                    ori_content_status=self.TASK_INIT_STATUS
                )
                if affected_rows == 0:
                    print("修改行数为 0,多个进程抢占同一个 task, 抢占失败,进程退出")
                    return
        gh_id = params['gh_id']
        process_times = params['process_times']
        download_videos = await self.get_video_list(content_id=content_id)
        # time.sleep(3)
        if download_videos:
            # switch to the processing status to acquire the lock on this task
            affected_rows = await self.update_content_status(
                trace_id=trace_id,
                new_content_status=self.TASK_PROCESSING_STATUS,
                ori_content_status=self.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                print("修改行数为 0,多个进程抢占同一个 task, 抢占失败,进程退出")
                return
            try:
                kimi_title = await self.get_kimi_title(content_id)
                await self.publish_videos_to_pq(
                    flow_pool_level=flow_pool_level,
                    kimi_title=kimi_title,
                    gh_id=gh_id,
                    trace_id=trace_id,
                    download_videos=download_videos,
                    process_times=process_times,
                    content_id=content_id
                )
            except Exception as e:
                logging(
                    code="5003",
                    info="history task 在发布的时候出现异常, error = {}".format(e),
                    trace_id=trace_id
                )
                await self.roll_back_content_status_when_fails(
                    trace_id=trace_id,
                    process_times=process_times
                )
        else:
            return

    async def deal(self):
        """
        Entry point: fetch the task batch and process it concurrently
        :return:
        """
        task_list = await self.get_tasks()
        logging(
            code="5002",
            info="History content_task Task Got {} this time".format(len(task_list)),
            function="History Contents Task"
        )
        if task_list:
            a = time.time()
            tasks = [self.process_task(params) for params in task_list]
            await asyncio.gather(*tasks)
            b = time.time()
            print("{} s 内处理了{}个任务".format(b - a, len(task_list)))
        else:
            print("暂时未获得历史已存在文章")
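

# Usage sketch (an assumption, not part of the original file): one way this task
# could be wired up and run on its own. The import path and client class name
# below are placeholders -- substitute the project's real async MySQL client,
# which must provide the async_select() / async_insert() coroutines used above.
if __name__ == "__main__":
    from applications.db import AsyncMySQLClient  # hypothetical import path

    async def main():
        mysql_client = AsyncMySQLClient()  # hypothetical constructor
        task = historyContentIdTask(mysql_client)
        await task.deal()

    asyncio.run(main())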