alg_recsys_task_doudi_videos.py 6.5 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177
# -*- coding: utf-8 -*-
import os
import sys
import traceback

# Make the repository root importable so the project-local modules below
# (my_utils, my_config, log) resolve regardless of the current working dir.
# NOTE: this must run BEFORE the project-local imports — do not reorder.
root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
if root_dir not in sys.path:
    sys.path.append(root_dir)
print("******** sys.path ********")
print(sys.path)

from multiprocessing import Process
from odps import ODPS
from threading import Timer
import threading
from my_utils import RedisHelper, execute_sql_from_odps, request_post
from my_config import set_config
from log import Log
import json
from datetime import datetime
from queue import Queue
from tqdm import tqdm
import time

# Backend endpoint that receives the fallback ("doudi"/bottom-line) video list.
DOUDI_URL = "http://recommend-common-internal.piaoquantv.com/longvideoapi/openapi/recommend/updateFallBackVideoListV2"
config_, _ = set_config()
log_ = Log()
  25. def check_data(project, table, date, hour, mm) -> int:
  26. """检查数据是否准备好,输出数据条数"""
  27. odps = ODPS(
  28. access_id=config_.ODPS_CONFIG['ACCESSID'],
  29. secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
  30. project=project,
  31. endpoint=config_.ODPS_CONFIG['ENDPOINT'],
  32. connect_timeout=3000,
  33. read_timeout=500000,
  34. pool_maxsize=1000,
  35. pool_connections=1000
  36. )
  37. try:
  38. t = odps.get_table(name=table)
  39. log_.info(f"检查分区是否存在-dt={date}.{hour}")
  40. check_res = t.exist_partition(partition_spec=f'dt={date},hour={hour},minute={hour}0000')
  41. if check_res:
  42. sql = f'select * from {project}.{table} where dt = \"{date}\" and hour = \"{hour}\" and minute = \"{hour}0000\"'
  43. log_.info(sql)
  44. with odps.execute_sql(sql=sql).open_reader() as reader:
  45. data_count = reader.count
  46. else:
  47. log_.info("表{}分区{}不存在".format(table, date + hour))
  48. data_count = 0
  49. except Exception as e:
  50. log_.error("table:{},date:{}.{} no data. return data_count=0,报错原因是:{}".format(table, date, hour, e))
  51. data_count = 0
  52. return data_count
  53. def get_sql(project, table, date, hour, mm):
  54. sql = '''
  55. SELECT vid
  56. ,AVG(rank) AS score
  57. ,SUM(1) as cnt
  58. FROM (
  59. SELECT a.dt
  60. ,a.hour
  61. ,a.region_provience
  62. ,t1.videoid AS vid
  63. ,t1.pos + 1 AS rank -- pos 从 0 开始,所以 +1 作为 rank
  64. FROM {}.{} a
  65. LATERAL VIEW POSEXPLODE(SPLIT(a.videoid_arr,",")) t1 AS
  66. pos
  67. ,videoid
  68. WHERE a.dt = "{}"
  69. AND a.hour = "{}"
  70. AND a.region_provience IN ("上海","中国","云南","内蒙古","北京","吉林","四川","天津","宁夏","安徽","山东","山西","广东","广西","新疆","江苏","江西","河北","河南","浙江","海南","湖北","湖南","甘肃","福建","西藏","贵州","辽宁","重庆","陕西","青海","黑龙江")
  71. )
  72. GROUP BY vid
  73. ORDER BY AVG(rank)
  74. '''.format(
  75. project, table, date, hour
  76. )
  77. print("sql:" + sql)
  78. records = execute_sql_from_odps(project=project, sql=sql)
  79. video_list = []
  80. with records.open_reader() as reader:
  81. for record in reader:
  82. key = record['vid']
  83. score = record['score']
  84. video_list.append({'videoId': key, 'rovScore': float(score)})
  85. return video_list
  86. def main():
  87. try:
  88. date = sys.argv[1]
  89. hour = sys.argv[2]
  90. # mm = sys.argv[3]
  91. mm = "00"
  92. except Exception as e:
  93. date = datetime.now().strftime('%Y%m%d')
  94. hour = datetime.now().hour
  95. # mm = datetime.now().minute
  96. mm = "00"
  97. log_.info("没有读取到参数,采用系统时间:{}".format(e))
  98. log_.info("使用时间参数-日期:{},小时:{}".format(date, str(hour)))
  99. if hour in ["23", "00", "01", "02", "03", "04", "05"]:
  100. log_.info(f"hour={hour}不执行,直接返回。")
  101. return
  102. # 1 判断上游数据表是否生产完成
  103. project = "loghubods"
  104. table = "alg_recsys_recall_region_1h_v1"
  105. run_flag = True
  106. begin_ts = int(time.time())
  107. table_data_cnt = 0
  108. while run_flag:
  109. if int(time.time()) - begin_ts >= 60 * 40:
  110. log_.info("等待上游数据超过40分钟了,认为失败退出:过了{}秒。".format(int(time.time()) - begin_ts))
  111. sys.exit(1)
  112. table_data_cnt = check_data(project, table, date, hour, mm)
  113. if table_data_cnt == 0:
  114. log_.info("上游数据{}未就绪{},等待...".format(table, date))
  115. log_.info("等待10分钟")
  116. time.sleep(60 * 10)
  117. else:
  118. run_flag = False
  119. log_.info("上游数据就绪,count={},开始读取数据表".format(table_data_cnt))
  120. # 2 读取数据表 处理特征
  121. video_list = get_sql(project, table, date, hour, mm)
  122. # 3 通过url请求,写入后端。
  123. video_list = video_list[:2000]
  124. log_.info("video的数据量:{}".format(len(video_list)))
  125. log_.info("10个videos:{}".format(video_list[:10]))
  126. result = request_post(request_url=DOUDI_URL, request_data={'videos': video_list})
  127. log_.info("请求结果result={}".format(str(result)))
  128. if result is None:
  129. msg = "请求失败1"
  130. log_.error(msg)
  131. sys.exit(1)
  132. elif result['code'] == 0:
  133. msg = "请求成功"
  134. log_.info(msg)
  135. else:
  136. msg = "请求失败2"
  137. log_.error(msg)
  138. sys.exit(1)
if __name__ == '__main__':
    log_.info("开始执行:" + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    # Run main() in a child process so the parent can enforce a hard timeout.
    process = Process(target=main)
    process.start()
    # Wait for the child to finish, or give up after the timeout.
    timeout = 3600 * 1
    process.join(timeout=timeout)  # timeout of 3600 seconds (1 hour)
    if process.is_alive():
        print("脚本执行时间超过1小时,执行失败,经过了{}秒。".format(timeout))
        process.terminate()  # kill the still-running child process
        sys.exit(1)  # exit the parent process with status code 1
    log_.info("完成执行:" + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

# Usage:
# cd /root/zhangbo/rov-offline
# python alg_recsys_task_doudi_videos.py 20240731 14
  153. """
  154. !!!!!!!!!!!!!! 通过url,给后端传送兜底视频。每小时执行一次,方案是从loghubods.alg_recsys_recall_region_1h_v1 的单路召回源中挑选视频。
  155. 更改字段:table 表名
  156. 两段sql 各种字段 注意分区是否有“分钟”
  157. record 各种字段
  158. if hour in ["00"]: 哪些小时不执行
  159. process.join(timeout=3600) 任务超时时间3600
int(time.time()) - begin_ts >= 60*40 等待上游数据的超时时间2400秒
  161. """