history_task.py

  1. """
  2. @author: luojunhui
  3. """
  4. import json
  5. import time
  6. import asyncio
  7. from applications.config import Config
  8. from applications.log import logging
  9. from applications.functions.pqFunctions import publish_to_pq, get_pq_video_detail
  10. from applications.functions.common import shuffle_list


class historyContentIdTask(object):
    """
    Process articles that have already been matched to mini-program videos
    """
    TASK_PROCESSING_STATUS = 101
    TASK_INIT_STATUS = 0
    TASK_PUBLISHED_STATUS = 4

    def __init__(self, mysql_client):
        """
        :param mysql_client: async MySQL client used for all reads and writes
        """
        self.mysql_client = mysql_client
        self.config = Config()
        self.article_match_video_table = self.config.article_match_video_table
        self.article_text_table = self.config.article_text_table
        self.article_crawler_video_table = self.config.article_crawler_video_table
        self.gh_id_dict = json.loads(self.config.get_config_value("testAccountLevel2"))
        self.history_coroutines = self.config.get_config_value("historyArticleCoroutines")

    async def get_tasks(self):
        """
        Fetch a batch of pending tasks
        :return:
        """
        select_sql1 = f"""
            SELECT
                ART.trace_id,
                ART.content_id,
                ART.flow_pool_level,
                ART.gh_id,
                ART.process_times
            FROM {self.article_match_video_table} ART
            JOIN (
                select content_id, count(1) as cnt
                from {self.article_crawler_video_table}
                where download_status = 2
                group by content_id
            ) VID on ART.content_id = VID.content_id and VID.cnt >= 3
            WHERE ART.content_status = 0 and ART.process_times <= 3
            ORDER BY request_timestamp
            LIMIT {self.history_coroutines};
        """
        tasks = await self.mysql_client.async_select(sql=select_sql1)
        task_obj_list = [
            {
                "trace_id": item[0],
                "content_id": item[1],
                "flow_pool_level": item[2],
                "gh_id": item[3],
                "process_times": item[4]
            } for item in tasks
        ]
        logging(
            code="9001",
            info="Fetched {} videos in this run".format(len(task_obj_list)),
            data=task_obj_list
        )
        return task_obj_list

    async def get_video_list(self, content_id):
        """
        Get the downloaded videos for a given content_id
        :param content_id:
        :return:
        """
        sql = f"""
            SELECT platform, play_count, like_count, video_oss_path, cover_oss_path, user_id
            FROM {self.article_crawler_video_table}
            WHERE content_id = '{content_id}' and download_status = 2
            ORDER BY score DESC;
        """
        res_tuple = await self.mysql_client.async_select(sql)
        if len(res_tuple) >= 3:
            return [
                {
                    "platform": i[0],
                    "play_count": i[1],
                    "like_count": i[2],
                    "video_oss_path": i[3],
                    "cover_oss_path": i[4],
                    "uid": i[5]
                }
                for i in res_tuple
            ]
        else:
            return []

    async def get_kimi_title(self, content_id):
        """
        Get the Kimi title for a given content_id
        :param content_id:
        :return:
        """
        select_sql = f"""
            select kimi_title from {self.article_text_table} where content_id = '{content_id}';
        """
        res_tuple = await self.mysql_client.async_select(select_sql)
        if res_tuple:
            return res_tuple[0][0]
        else:
            return False

    async def update_content_status(self, new_content_status, trace_id, ori_content_status):
        """
        :param new_content_status:
        :param trace_id:
        :param ori_content_status:
        :return:
        """
        update_sql = f"""
            UPDATE {self.article_match_video_table}
            SET content_status = %s, content_status_update_time = %s
            WHERE trace_id = %s and content_status = %s;
        """
        row_counts = await self.mysql_client.async_insert(
            sql=update_sql,
            params=(
                new_content_status,
                int(time.time()),
                trace_id,
                ori_content_status
            )
        )
        return row_counts

    async def publish_videos_to_pq(self, trace_id, flow_pool_level, kimi_title, gh_id, download_videos, process_times):
        """
        Publish videos to PQ
        :param process_times:
        :param trace_id:
        :param download_videos: downloaded videos ---> list [{}, {}, {} ...]
        :param gh_id: official account id ---> str
        :param kimi_title: Kimi title ---> str
        :param flow_pool_level: flow pool level ---> str
        :return:
        """
        match flow_pool_level:
            case "autoArticlePoolLevel4":
                # cold-start pool, use the full set
                video_list = shuffle_list(download_videos)[:3]
            case "autoArticlePoolLevel3":
                # secondary slot, only for the configured accounts
                if self.gh_id_dict.get(gh_id):
                    video_list = shuffle_list(download_videos)[:3]
                else:
                    video_list = download_videos[:3]
            case "autoArticlePoolLevel2":
                video_list = []
            case "autoArticlePoolLevel1":
                # headline slot, skip for now
                video_list = download_videos[:3]
            case _:
                print("no flow pool level provided")
                video_list = download_videos[:3]
        L = []
        for video_obj in video_list:
            params = {
                "videoPath": video_obj['video_oss_path'],
                "uid": video_obj['uid'],
                "title": kimi_title
            }
            publish_response = await publish_to_pq(params)
            video_id = publish_response['data']['id']
            response = await get_pq_video_detail(video_id)
            # time.sleep(2)
            obj = {
                "uid": video_obj['uid'],
                "source": video_obj['platform'],
                "kimiTitle": kimi_title,
                "videoId": response['data'][0]['id'],
                "videoCover": response['data'][0]['shareImgPath'],
                "videoPath": response['data'][0]['videoPath'],
                "videoOss": video_obj['video_oss_path']
            }
            L.append(obj)
        update_sql = f"""
            UPDATE {self.article_match_video_table}
            SET content_status = %s, response = %s, process_times = %s
            WHERE trace_id = %s and content_status = %s;
        """
        await self.mysql_client.async_insert(
            sql=update_sql,
            params=(
                self.TASK_PUBLISHED_STATUS,
                json.dumps(L, ensure_ascii=False),
                process_times + 1,
                trace_id,
                self.TASK_PROCESSING_STATUS
            )
        )
        logging(
            code="9002",
            info="updated from history articles",
            trace_id=trace_id
        )

    async def roll_back_content_status_when_fails(self, process_times, trace_id):
        """
        On failure, roll the task back to the initial status and bump process_times by 1
        :param process_times:
        :param trace_id:
        :return:
        """
        update_article_sql = f"""
            UPDATE {self.article_match_video_table}
            SET
                content_status = %s,
                content_status_update_time = %s,
                process_times = %s
            WHERE trace_id = %s and content_status = %s;
        """
        await self.mysql_client.async_insert(
            sql=update_article_sql,
            params=(
                self.TASK_INIT_STATUS,
                int(time.time()),
                process_times + 1,
                trace_id,
                self.TASK_PROCESSING_STATUS
            )
        )

    async def process_task(self, params):
        """
        Process a single task asynchronously
        :param params:
        :return:
        """
        content_id = params['content_id']
        trace_id = params['trace_id']
        flow_pool_level = params['flow_pool_level']
        gh_id = params['gh_id']
        process_times = params['process_times']
        download_videos = await self.get_video_list(content_id=content_id)
        # time.sleep(3)
        if download_videos:
            # switch the status to "processing" to take the lock on this task
            affected_rows = await self.update_content_status(
                trace_id=trace_id,
                new_content_status=self.TASK_PROCESSING_STATUS,
                ori_content_status=self.TASK_INIT_STATUS
            )
            if affected_rows == 0:
                print("0 rows updated: another worker grabbed this task first, exiting")
                return
            try:
                kimi_title = await self.get_kimi_title(content_id)
                await self.publish_videos_to_pq(
                    flow_pool_level=flow_pool_level,
                    kimi_title=kimi_title,
                    gh_id=gh_id,
                    trace_id=trace_id,
                    download_videos=download_videos,
                    process_times=process_times
                )
            except Exception as e:
                logging(
                    code="5003",
                    info="history task failed while publishing, error = {}".format(e),
                    trace_id=trace_id
                )
                await self.roll_back_content_status_when_fails(
                    trace_id=trace_id,
                    process_times=process_times
                )
        else:
            return

    async def deal(self):
        """
        Entry point: fetch and process this round of tasks
        :return:
        """
        task_list = await self.get_tasks()
        logging(
            code="5002",
            info="History content_task Task Got {} this time".format(len(task_list)),
            function="History Contents Task"
        )
        if task_list:
            a = time.time()
            tasks = [self.process_task(params) for params in task_list]
            await asyncio.gather(*tasks)
            b = time.time()
            print("processed {} tasks in {} s".format(len(task_list), b - a))
        else:
            print("no existing history articles found this round")
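

# --- usage sketch (illustration only, not part of the original task logic) ---
# Shows one way this task could be driven from an asyncio entry point.
# `AsyncMySQLClient`, its import path, and the `init_pool()` / `close()` methods
# are hypothetical names used for illustration; the only interface this class
# actually relies on is an object exposing async `async_select(sql)` and
# `async_insert(sql, params)` coroutines.
if __name__ == "__main__":
    from applications.db import AsyncMySQLClient  # hypothetical import path

    async def main():
        mysql_client = AsyncMySQLClient()   # assumed constructor
        await mysql_client.init_pool()      # assumed connection-pool setup
        try:
            await historyContentIdTask(mysql_client).deal()
        finally:
            await mysql_client.close()      # assumed cleanup

    asyncio.run(main())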