@@ -4,6 +4,7 @@
 import json
 import time
 import asyncio
+import traceback
 
 from applications.config import Config
 from applications.log import logging
@@ -51,9 +52,9 @@ class historyContentIdTask(object):
                 where download_status = 2
                 group by content_id
             ) VID on ART.content_id = VID.content_id and VID.cnt >= 3
-            WHERE ART.content_status = 0 and ART.process_times <= 3
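+            -- only pull level-1 auto article pool tasks, one per run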
+            WHERE ART.content_status = 0 and ART.process_times <= 3 AND ART.flow_pool_level = 'autoArticlePoolLevel1'
             ORDER BY request_timestamp
-            LIMIT {self.history_coroutines};
+            LIMIT 1;
         """
         tasks = await self.mysql_client.async_select(sql=select_sql1)
         task_obj_list = [
@@ -170,58 +171,60 @@ class historyContentIdTask(object):
                     config=self.config,
                     content_id=content_id
                 )
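+                # rank fission_dict entries (keyed by video OSS path) by fission_0_on_read, highest first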
+                fission_list = [[i] + [fission_dict[i]['fission_0_on_read']] for i in fission_dict.keys()]
+                sorted_fission_list = sorted(fission_list, key=lambda x: x[1], reverse=True)
                 download_videos_with_fission_info = []
-                for video in download_videos:
-                    video["fission_0_rate"] = fission_dict.get(video['video_oss_path'], {}).get("fission_0_rate", 0)
-                    video["fission_0_on_read"] = fission_dict.get(video['video_oss_path'], {}).get("fission_0_on_read", 0)
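+                # overwrite the first three candidates' OSS paths with the top-ranked entries on the CDN host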
+                for index, video in enumerate(download_videos[:3]):
+                    video['video_oss_path'] = "https://rescdn.yishihui.com/" + sorted_fission_list[index][0]
+                    video["fission_0_on_read"] = sorted_fission_list[index][1]
                     download_videos_with_fission_info.append(video)
-                # sorted_videos = sorted(download_videos_with_fission_info, key=lambda x: x['fission_0_rate'], reverse=True)
-                sorted_videos = sorted(download_videos_with_fission_info, key=lambda x: x['fission_0_on_read'], reverse=True)
-                video_list = sorted_videos[:3]
+                video_list = download_videos_with_fission_info
             case _:
                 print("未传流量池信息")
                 video_list = download_videos[:3]
         L = []
-        for video_obj in video_list:
-            params = {
-                "videoPath": video_obj['video_oss_path'],
-                "uid": video_obj['uid'],
-                "title": kimi_title
-            }
-            publish_response = await publish_to_pq(params)
-            video_id = publish_response['data']['id']
-            response = await get_pq_video_detail(video_id)
-            # time.sleep(2)
-            obj = {
-                "uid": video_obj['uid'],
-                "source": video_obj['platform'],
-                "kimiTitle": kimi_title,
-                "videoId": response['data'][0]['id'],
-                "videoCover": response['data'][0]['shareImgPath'],
-                "videoPath": response['data'][0]['videoPath'],
-                "videoOss": video_obj['video_oss_path']
-            }
-            L.append(obj)
-            update_sql = f"""
-                UPDATE {self.article_match_video_table}
-                SET content_status = %s, response = %s, process_times = %s
-                WHERE trace_id = %s and content_status = %s;
-            """
-            await self.mysql_client.async_insert(
-                sql=update_sql,
-                params=(
-                    self.TASK_PUBLISHED_STATUS,
-                    json.dumps(L, ensure_ascii=False),
-                    process_times + 1,
-                    trace_id,
-                    self.TASK_PROCESSING_STATUS
-                )
-            )
-            logging(
-                code="9002",
-                info="已经从历史文章更新",
-                trace_id=trace_id
-            )
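+        # publishing is temporarily disabled below; just print each candidate for inspection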
+        for index, video_obj in enumerate(video_list, 1):
+            print(index)
+            print(json.dumps(video_obj, ensure_ascii=False, indent=4))
+            # params = {
+            #     "videoPath": video_obj['video_oss_path'],
+            #     "uid": video_obj['uid'],
+            #     "title": kimi_title
+            # }
+            # publish_response = await publish_to_pq(params)
+            # video_id = publish_response['data']['id']
+            # response = await get_pq_video_detail(video_id)
+            # # time.sleep(2)
+            # obj = {
+            #     "uid": video_obj['uid'],
+            #     "source": video_obj['platform'],
+            #     "kimiTitle": kimi_title,
+            #     "videoId": response['data'][0]['id'],
+            #     "videoCover": response['data'][0]['shareImgPath'],
+            #     "videoPath": response['data'][0]['videoPath'],
+            #     "videoOss": video_obj['video_oss_path']
+            # }
+            # L.append(obj)
+            # update_sql = f"""
+            #     UPDATE {self.article_match_video_table}
+            #     SET content_status = %s, response = %s, process_times = %s
+            #     WHERE trace_id = %s and content_status = %s;
+            # """
+            # await self.mysql_client.async_insert(
+            #     sql=update_sql,
+            #     params=(
+            #         self.TASK_PUBLISHED_STATUS,
+            #         json.dumps(L, ensure_ascii=False),
+            #         process_times + 1,
+            #         trace_id,
+            #         self.TASK_PROCESSING_STATUS
+            #     )
+            # )
+            # logging(
+            #     code="9002",
+            #     info="已经从历史文章更新",
+            #     trace_id=trace_id
+            # )
 
     async def roll_back_content_status_when_fails(self, process_times, trace_id):
         """
@@ -289,6 +292,8 @@ class historyContentIdTask(object):
                 info="history task 在发布的时候出现异常, error = {}".format(e),
                 trace_id=trace_id
             )
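+            # also print the full stack trace, not just the logged message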
+            error_msg = traceback.format_exc()
+            print(error_msg)
             await self.roll_back_content_status_when_fails(
                 trace_id=trace_id,
                 process_times=process_times