# history_task.py
  1. """
  2. @author: luojunhui
  3. """
  4. import json
  5. import time
  6. import asyncio
  7. from applications.config import Config
  8. from applications.log import logging
  9. from applications.functions.pqFunctions import publish_to_pq, get_pq_video_detail
  10. from applications.functions.common import shuffle_list
  11. from applications.match_algorithm.rank import get_title_oss_fission_list
class historyContentIdTask(object):
    """
    Task runner for articles whose mini-program videos have already been
    matched (historical articles): picks pending rows, publishes their
    videos to PQ, and updates the match-table status.
    """

    # content_status values used as a compare-and-set task lock:
    TASK_PROCESSING_STATUS = 101   # row is claimed by a worker
    TASK_INIT_STATUS = 0           # row is waiting to be processed
    TASK_PUBLISHED_STATUS = 4      # row was published successfully

    def __init__(self, mysql_client):
        """
        :param mysql_client: async MySQL client exposing async_select / async_insert
        """
        self.mysql_client = mysql_client
        self.config = Config()
        # table names come from the shared config object
        self.article_match_video_table = self.config.article_match_video_table
        self.article_text_table = self.config.article_text_table
        self.article_crawler_video_table = self.config.article_crawler_video_table
        # accounts that get the shuffled video order on the level-3 flow pool
        self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
        # batch size / concurrency for one round of history tasks
        self.history_coroutines = self.config.get_config_value("historyArticleCoroutines")
  30. async def get_tasks(self):
  31. """
  32. 获取任务
  33. :return:
  34. """
  35. select_sql1 = f"""
  36. SELECT
  37. ART.trace_id,
  38. ART.content_id,
  39. ART.flow_pool_level,
  40. ART.gh_id,
  41. ART.process_times
  42. FROM {self.article_match_video_table} ART
  43. JOIN (
  44. select content_id, count(1) as cnt
  45. from {self.article_crawler_video_table}
  46. where download_status = 2
  47. group by content_id
  48. ) VID on ART.content_id = VID.content_id and VID.cnt >= 3
  49. WHERE ART.content_status = 0 and ART.process_times <= 3
  50. ORDER BY request_timestamp
  51. LIMIT {self.history_coroutines};
  52. """
  53. tasks = await self.mysql_client.async_select(sql=select_sql1)
  54. task_obj_list = [
  55. {
  56. "trace_id": item[0],
  57. "content_id": item[1],
  58. "flow_pool_level": item[2],
  59. "gh_id": item[3],
  60. "process_times": item[4]
  61. } for item in tasks
  62. ]
  63. logging(
  64. code="9001",
  65. info="本次任务获取到 {} 条视频".format(len(task_obj_list)),
  66. data=task_obj_list
  67. )
  68. return task_obj_list
  69. async def get_video_list(self, content_id) -> list[dict]:
  70. """
  71. content_id
  72. :return:
  73. """
  74. sql = f"""
  75. SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
  76. FROM {self.article_crawler_video_table}
  77. WHERE content_id = '{content_id}' and download_status = 2
  78. ORDER BY score DESC;
  79. """
  80. res_tuple = await self.mysql_client.async_select(sql)
  81. if len(res_tuple) >= 3:
  82. return [
  83. {
  84. "platform": i[0],
  85. "play_count": i[1],
  86. "like_count": i[2],
  87. "video_oss_path": i[3],
  88. "cover_oss_path": i[4],
  89. "uid": i[5]
  90. }
  91. for i in res_tuple
  92. ]
  93. else:
  94. return []
  95. async def get_kimi_title(self, content_id):
  96. """
  97. 获取 kimiTitle
  98. :param content_id:
  99. :return:
  100. """
  101. select_sql = f"""
  102. select kimi_title from {self.article_text_table} where content_id = '{content_id}';
  103. """
  104. res_tuple = await self.mysql_client.async_select(select_sql)
  105. if res_tuple:
  106. return res_tuple[0][0]
  107. else:
  108. return False
  109. async def update_content_status(self, new_content_status, trace_id, ori_content_status):
  110. """
  111. :param new_content_status:
  112. :param trace_id:
  113. :param ori_content_status:
  114. :return:
  115. """
  116. update_sql = f"""
  117. UPDATE {self.article_match_video_table}
  118. SET content_status = %s, content_status_update_time = %s
  119. WHERE trace_id = %s and content_status = %s;
  120. """
  121. row_counts = await self.mysql_client.async_insert(
  122. sql=update_sql,
  123. params=(
  124. new_content_status,
  125. int(time.time()),
  126. trace_id,
  127. ori_content_status
  128. )
  129. )
  130. return row_counts
    async def publish_videos_to_pq(self, trace_id, flow_pool_level, kimi_title, gh_id, download_videos, process_times, content_id):
        """
        Publish the selected videos to PQ and mark this task as published.

        :param content_id: article content id (used to look up fission info)
        :param process_times: number of attempts made on this task so far
        :param trace_id: task trace id
        :param download_videos: downloaded videos ---> list [{}, {}, {}.... ]
        :param gh_id: official-account id ---> str
        :param kimi_title: kimi title ---> str
        :param flow_pool_level: flow-pool level ---> str
        :return:
        """
        # Choose up to three candidate videos according to the flow-pool level.
        match flow_pool_level:
            case "autoArticlePoolLevel4":
                # cold-start pool: shuffle for every account
                video_list = shuffle_list(download_videos)[:3]
            case "autoArticlePoolLevel3":
                # second-slot pool: shuffle only for whitelisted accounts
                if self.gh_id_dict.get(gh_id):
                    video_list = shuffle_list(download_videos)[:3]
                else:
                    video_list = download_videos[:3]
            case "autoArticlePoolLevel2":
                # NOTE(review): level 2 deliberately publishes nothing, yet the
                # task is still marked PUBLISHED below — confirm this is intended.
                video_list = []
            case "autoArticlePoolLevel1":
                # headline pool: prefer the fission-resorted ordering
                fission_resort_list = await get_title_oss_fission_list(
                    db_client=self.mysql_client,
                    config=self.config,
                    content_id=content_id
                )
                if fission_resort_list:
                    # fission videos first, then the originally downloaded ones
                    total_video_list = fission_resort_list + download_videos
                    logging(
                        code=1106,
                        info="查找裂变信息成功",
                        trace_id=trace_id,
                        data={
                            "ori_list": download_videos[:3],
                            "fission_list": fission_resort_list
                        }
                    )
                    video_list = total_video_list[:3]
                else:
                    # no fission info found; fall back to the original order
                    logging(
                        code=1107,
                        info="查找裂变信息失败",
                        trace_id=trace_id,
                        data={
                            "ori_list": download_videos[:3]
                        }
                    )
                    video_list = download_videos[:3]
            case _:
                # unknown / missing pool level: keep the original order
                print("未传流量池信息")
                video_list = download_videos[:3]
        # Publish each candidate and collect PQ's metadata for the response column.
        L = []
        for video_obj in video_list:
            params = {
                "videoPath": video_obj['video_oss_path'],
                "uid": video_obj['uid'],
                "title": kimi_title
            }
            publish_response = await publish_to_pq(params)
            video_id = publish_response['data']['id']
            # read the published video's detail (cover / path) back from PQ
            response = await get_pq_video_detail(video_id)
            obj = {
                "uid": video_obj['uid'],
                "source": video_obj['platform'],
                "kimiTitle": kimi_title,
                "videoId": response['data'][0]['id'],
                "videoCover": response['data'][0]['shareImgPath'],
                "videoPath": response['data'][0]['videoPath'],
                "videoOss": video_obj['video_oss_path']
            }
            L.append(obj)
        # Compare-and-set PROCESSING -> PUBLISHED so only the lock holder finishes
        # the task; also bump process_times and persist the publish results.
        update_sql = f"""
        UPDATE {self.article_match_video_table}
        SET content_status = %s, response = %s, process_times = %s
        WHERE trace_id = %s and content_status = %s;
        """
        await self.mysql_client.async_insert(
            sql=update_sql,
            params=(
                self.TASK_PUBLISHED_STATUS,
                json.dumps(L, ensure_ascii=False),
                process_times + 1,
                trace_id,
                self.TASK_PROCESSING_STATUS
            )
        )
        logging(
            code="9002",
            info="已经从历史文章更新",
            trace_id=trace_id
        )
  237. async def roll_back_content_status_when_fails(self, process_times, trace_id):
  238. """
  239. 处理失败,回滚至初始状态,处理次数加 1
  240. :param process_times:
  241. :param trace_id:
  242. :return:
  243. """
  244. update_article_sql = f"""
  245. UPDATE {self.article_match_video_table}
  246. SET
  247. content_status = %s,
  248. content_status_update_time = %s,
  249. process_times = %s
  250. WHERE trace_id = %s and content_status = %s;
  251. """
  252. await self.mysql_client.async_insert(
  253. sql=update_article_sql,
  254. params=(
  255. self.TASK_INIT_STATUS,
  256. int(time.time()),
  257. process_times + 1,
  258. trace_id,
  259. self.TASK_PROCESSING_STATUS
  260. )
  261. )
  262. async def process_task(self, params):
  263. """
  264. 异步执行
  265. :param params:
  266. :return:
  267. """
  268. content_id = params['content_id']
  269. trace_id = params['trace_id']
  270. flow_pool_level = params['flow_pool_level']
  271. gh_id = params['gh_id']
  272. process_times = params['process_times']
  273. download_videos = await self.get_video_list(content_id=content_id)
  274. # time.sleep(3)
  275. if download_videos:
  276. # 修改状态为执行状态,获取该任务的锁
  277. affected_rows = await self.update_content_status(
  278. trace_id=trace_id,
  279. new_content_status=self.TASK_PROCESSING_STATUS,
  280. ori_content_status=self.TASK_INIT_STATUS
  281. )
  282. if affected_rows == 0:
  283. print("修改行数为 0,多个进程抢占同一个 task, 抢占失败,进程退出")
  284. return
  285. try:
  286. kimi_title = await self.get_kimi_title(content_id)
  287. await self.publish_videos_to_pq(
  288. flow_pool_level=flow_pool_level,
  289. kimi_title=kimi_title,
  290. gh_id=gh_id,
  291. trace_id=trace_id,
  292. download_videos=download_videos,
  293. process_times=process_times,
  294. content_id=content_id
  295. )
  296. except Exception as e:
  297. logging(
  298. code="5003",
  299. info="history task 在发布的时候出现异常, error = {}".format(e),
  300. trace_id=trace_id
  301. )
  302. await self.roll_back_content_status_when_fails(
  303. trace_id=trace_id,
  304. process_times=process_times
  305. )
  306. else:
  307. return
  308. async def deal(self):
  309. """
  310. 处理
  311. :return:
  312. """
  313. task_list = await self.get_tasks()
  314. logging(
  315. code="5002",
  316. info="History content_task Task Got {} this time".format(len(task_list)),
  317. function="History Contents Task"
  318. )
  319. if task_list:
  320. a = time.time()
  321. tasks = [self.process_task(params) for params in task_list]
  322. await asyncio.gather(*tasks)
  323. b = time.time()
  324. print("{} s 内处理了{}个任务".format(b - a, len(task_list)))
  325. else:
  326. print("暂时未获得历史已存在文章")