# alg_recsys_task_doudi_videos.py
  1. # -*- coding: utf-8 -*-
  2. import os
  3. import sys
  4. import traceback
  5. from my_utils import request_post
  6. root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
  7. if root_dir not in sys.path:
  8. sys.path.append(root_dir)
  9. print("******** sys.path ********")
  10. print(sys.path)
  11. from multiprocessing import Process
  12. from odps import ODPS
  13. from threading import Timer
  14. import threading
  15. from my_utils import RedisHelper, execute_sql_from_odps
  16. from my_config import set_config
  17. from log import Log
  18. import json
  19. from datetime import datetime
  20. from queue import Queue
  21. from tqdm import tqdm
  22. import time
  23. DOUDI_URL = "http://recommend-common-internal.piaoquantv.com/longvideoapi/openapi/recommend/updateFallBackVideoListV2"
  24. config_, _ = set_config()
  25. log_ = Log()
  26. def check_data(project, table, date, hour, mm) -> int:
  27. """检查数据是否准备好,输出数据条数"""
  28. odps = ODPS(
  29. access_id=config_.ODPS_CONFIG['ACCESSID'],
  30. secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
  31. project=project,
  32. endpoint=config_.ODPS_CONFIG['ENDPOINT'],
  33. connect_timeout=3000,
  34. read_timeout=500000,
  35. pool_maxsize=1000,
  36. pool_connections=1000
  37. )
  38. try:
  39. t = odps.get_table(name=table)
  40. log_.info(f"检查分区是否存在-【 dt={date}")
  41. check_res = t.exist_partition(partition_spec=f'dt={date}')
  42. if check_res:
  43. sql = f'select * from {project}.{table} where dt = {date} and hour = \"{hour}\" and minute = \"{hour}0000\"'
  44. log_.info(sql)
  45. with odps.execute_sql(sql=sql).open_reader() as reader:
  46. data_count = reader.count
  47. else:
  48. log_.info("表{}分区{}不存在".format(table, date + hour))
  49. data_count = 0
  50. except Exception as e:
  51. log_.error("table:{},date:{} no data. return data_count=0,报错原因是:{}".format(table, date, hour, e))
  52. data_count = 0
  53. return data_count
  54. def get_sql(project, table, date, hour, mm):
  55. sql = '''
  56. SELECT vid
  57. ,AVG(rank) AS score
  58. ,SUM(1) as cnt
  59. FROM (
  60. SELECT a.dt
  61. ,a.hour
  62. ,a.region_provience
  63. ,t1.videoid AS vid
  64. ,t1.pos + 1 AS rank -- pos 从 0 开始,所以 +1 作为 rank
  65. FROM {}.{} a
  66. LATERAL VIEW POSEXPLODE(SPLIT(a.videoid_arr,",")) t1 AS
  67. pos
  68. ,videoid
  69. WHERE a.dt = "{}"
  70. AND a.hour = "{}"
  71. AND a.region_provience IN ("上海","中国","云南","内蒙古","北京","吉林","四川","天津","宁夏","安徽","山东","山西","广东","广西","新疆","江苏","江西","河北","河南","浙江","海南","湖北","湖南","甘肃","福建","西藏","贵州","辽宁","重庆","陕西","青海","黑龙江")
  72. )
  73. GROUP BY vid
  74. ORDER BY AVG(rank)
  75. '''.format(
  76. project, table, date, hour
  77. )
  78. print("sql:" + sql)
  79. records = execute_sql_from_odps(project=project, sql=sql)
  80. video_list = []
  81. with records.open_reader() as reader:
  82. for record in reader:
  83. key = record['vid']
  84. score = record['score']
  85. video_list.append({'videoId': key, 'rovScore': float(score)})
  86. return video_list
  87. def main():
  88. try:
  89. date = sys.argv[1]
  90. hour = sys.argv[2]
  91. # mm = sys.argv[3]
  92. mm = "00"
  93. except Exception as e:
  94. date = datetime.now().strftime('%Y%m%d')
  95. hour = datetime.now().hour
  96. # mm = datetime.now().minute
  97. mm = "00"
  98. log_.info("没有读取到参数,采用系统时间:{}".format(e))
  99. log_.info("使用时间参数-日期:{},小时:{}".format(date, str(hour)))
  100. if hour in ["23", "00", "01", "02", "03", "04", "05"]:
  101. log_.info(f"hour={hour}不执行,直接返回。")
  102. return
  103. # 1 判断上游数据表是否生产完成
  104. project = "loghubods"
  105. table = "alg_recsys_recall_region_1h_v1"
  106. run_flag = True
  107. begin_ts = int(time.time())
  108. table_data_cnt = 0
  109. while run_flag:
  110. if int(time.time()) - begin_ts >= 60 * 40:
  111. log_.info("等待上游数据超过40分钟了,认为失败退出:过了{}秒。".format(int(time.time()) - begin_ts))
  112. sys.exit(1)
  113. table_data_cnt = check_data(project, table, date, hour, mm)
  114. if table_data_cnt == 0:
  115. log_.info("上游数据{}未就绪{},等待...".format(table, date))
  116. log_.info("等待10分钟")
  117. time.sleep(60 * 10)
  118. else:
  119. run_flag = False
  120. log_.info("上游数据就绪,count={},开始读取数据表".format(table_data_cnt))
  121. # 2 读取数据表 处理特征
  122. video_list = get_sql(project, table, date, hour, mm)
  123. # 3 通过url请求,写入后端。
  124. video_list = video_list[:2000]
  125. log_.info("video的数据量:{}".format(len(video_list)))
  126. result = request_post(request_url=DOUDI_URL, request_data={'videos': video_list})
  127. log_.info("请求结果result={}".format(str(result)))
  128. if result is None:
  129. msg = "请求失败1"
  130. log_.error(msg)
  131. sys.exit(1)
  132. elif result['code'] == 0:
  133. msg = "请求成功"
  134. log_.info(msg)
  135. else:
  136. msg = "请求失败2"
  137. log_.error(msg)
  138. sys.exit(1)
  139. if __name__ == '__main__':
  140. log_.info("开始执行:" + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
  141. process = Process(target=main)
  142. process.start()
  143. # 等待子进程完成或超时
  144. timeout = 3600 * 1
  145. process.join(timeout=timeout) # 设置超时为3600秒(1小时)
  146. if process.is_alive():
  147. print("脚本执行时间超过1小时,执行失败,经过了{}秒。".format(timeout))
  148. process.terminate() # 终止子进程
  149. sys.exit(1) # 直接退出主进程并返回状态码999
  150. log_.info("完成执行:" + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
  151. # cd /root/zhangbo/rov-offline
  152. # python alg_recsys_task_doudi_videos.py 20240731 14
  153. """
  154. !!!!!!!!!!!!!! 通过url,给后端传送兜底视频。每小时执行一次,方案是从loghubods.alg_recsys_recall_region_1h_v1 的单路召回源中挑选视频。
  155. 更改字段:table 表名
  156. 两段sql 各种字段 注意分区是否有“分钟”
  157. record 各种字段
  158. if hour in ["00"]: 哪些小时不执行
  159. process.join(timeout=3600) 任务超时时间3600
  160. int(time.time()) - begin_ts >= 60*50 任务超时时间3000
  161. """