alg_recsys_rank_item_realtime_1day.py 7.7 KB

# -*- coding: utf-8 -*-
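"""Hourly job: build 1-day realtime video features and push them to Redis.

Waits for the upstream ODPS hourly partition to be ready, aggregates the
per-hour counters into comma-separated "dt:value" lists per video, and
stores one JSON blob per video under the "item_rt_fea_1day_" key prefix.

Usage (see also the command at the bottom of this file):
    python alg_recsys_rank_item_realtime_1day.py <date YYYYMMDD> <hour HH>
"""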
import json
import sys
from datetime import datetime, timedelta
from threading import Timer

from odps import ODPS

from alg_recsys_recall_4h_region_trend import records_process_for_list
from config import set_config
from log import Log
from utils import RedisHelper, execute_sql_from_odps

config_, _ = set_config()
log_ = Log()
redis_helper = RedisHelper()

REDIS_PREFIX = "item_rt_fea_1day_"


def process_and_store(row):
    """Write one (video_id, json_str) pair to Redis."""
    video_id, json_str = row
    key = REDIS_PREFIX + str(video_id)
    expire_time = 24 * 3600 * 2  # TTL: 2 days
    redis_helper.set_data_to_redis(key, json_str, expire_time)
    # log_.info("video written: key={}, value={}".format(key, json_str))
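
# Illustrative only - a hypothetical key/value pair as stored in Redis:
#   key:   item_rt_fea_1day_12345
#   value: '{"view_pv_list_1day": "2024011700:3,2024011701:5", ...}'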


def check_data(project, table, partition) -> int:
    """Check whether the data is ready; return the partition's row count."""
    odps = ODPS(
        access_id=config_.ODPS_CONFIG['ACCESSID'],
        secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
        project=project,
        endpoint=config_.ODPS_CONFIG['ENDPOINT'],
        connect_timeout=3000,
        read_timeout=500000,
        pool_maxsize=1000,
        pool_connections=1000,
    )
    try:
        t = odps.get_table(name=table)
        log_.info(f"Checking whether the partition exists - [ dt={partition} ]")
        check_res = t.exist_partition(partition_spec=f'dt={partition}')
        if check_res:
            # dt is a string partition column, so quote the value.
            sql = f"select * from {project}.{table} where dt = '{partition}'"
            log_.info(sql)
            with odps.execute_sql(sql=sql).open_reader() as reader:
                data_count = reader.count
        else:
            log_.info("Table {} has no partition {}".format(table, partition))
            data_count = 0
    except Exception as e:
        log_.error("table:{}, partition:{} no data, returning data_count=0: {}".format(table, partition, e))
        data_count = 0
    return data_count
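
# Example call, using the partition naming built in h_timer_check() below
# (date + zero-padded hour):
#   check_data("loghubods", "video_data_each_hour_dataset_24h_total_apptype", "2024011720")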


def get_sql(date, previous_date_str, project):
    """Run the aggregation SQL and return [[video_id, feature_json], ...].

    The dt window covers the hourly partitions from 00:00 of the previous
    day through 23:00 of the target day.
    """
    sql = '''
    SELECT  videoid
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",view_pv))) AS view_pv_list_1day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",view_uv))) AS view_uv_list_1day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",play_pv))) AS play_pv_list_1day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",play_uv))) AS play_uv_list_1day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",share_pv))) AS share_pv_list_1day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",share_uv))) AS share_uv_list_1day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",return_uv))) AS return_uv_list_1day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",p_view_uv))) AS p_view_uv_list_1day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",p_view_pv))) AS p_view_pv_list_1day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",p_return_uv))) AS p_return_uv_list_1day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",2day_share_uv))) AS share_uv_list_2day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",2day_share_pv))) AS share_pv_list_2day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",3day_share_uv))) AS share_uv_list_3day
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",3day_share_pv))) AS share_pv_list_3day
    FROM    (
        SELECT  videoid
                ,dt
                ,SUM(view次数) AS view_pv
                ,SUM(view人数) AS view_uv
                ,SUM(play次数) AS play_pv
                ,SUM(play人数) AS play_uv
                ,SUM(share次数) AS share_pv
                ,SUM(share人数) AS share_uv
                ,SUM(回流人数) AS return_uv
                ,SUM(platform_view) AS p_view_uv
                ,SUM(platform_view_total) AS p_view_pv
                ,SUM(platform_return) AS p_return_uv
                ,SUM(lasttwodays_share) AS 2day_share_uv
                ,SUM(lasttwodays_share_total) AS 2day_share_pv
                ,SUM(lastthreedays_share) AS 3day_share_uv
                ,SUM(lastthreedays_share_total) AS 3day_share_pv
        FROM    loghubods.video_data_each_hour_dataset_24h_total_apptype
        WHERE   dt <= '{}23'
        AND     dt >= '{}00'
        GROUP BY videoid, dt
    )
    GROUP BY videoid
    '''.format(date, previous_date_str)
    print("sql:" + sql)
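    # Only the *_1day fields are copied into the JSON below; the 2day/3day
    # share lists are computed in the SQL but not written to Redis.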
    records = execute_sql_from_odps(project=project, sql=sql)
    feature_keys = [
        "view_pv_list_1day",
        "view_uv_list_1day",
        "play_pv_list_1day",
        "play_uv_list_1day",
        "share_pv_list_1day",
        "share_uv_list_1day",
        "return_uv_list_1day",
        "p_view_pv_list_1day",
        "p_view_uv_list_1day",
        "p_return_uv_list_1day",
    ]
    video_list = []
    with records.open_reader() as reader:
        for record in reader:
            video_id = record['videoid']
            m = dict()
            for key in feature_keys:
                # Fetch each field independently so one missing column
                # does not drop the whole record.
                try:
                    m[key] = record[key]
                except Exception as e:
                    log_.error(e)
            json_str = json.dumps(m)
            video_list.append([video_id, json_str])
    return video_list
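
# Illustrative only - a hypothetical element of the returned video_list:
#   [67890, '{"view_pv_list_1day": "2024011700:12,2024011701:8", ...}']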


def h_timer_check():
    try:
        date = sys.argv[1]
        hour = sys.argv[2]
    except Exception as e:
        now_date = datetime.today()
        date = datetime.strftime(now_date, '%Y%m%d')
        # Zero-pad the hour so the fallback matches the 'YYYYMMDDHH'
        # partition naming checked below.
        hour = datetime.now().strftime('%H')
        log_.info("No arguments provided, using system time instead. info: {}".format(e))
    # 1. Check whether the upstream table has finished producing.
    project = "loghubods"
    table = "video_data_each_hour_dataset_24h_total_apptype"
    partition = str(date) + str(hour)
    table_data_cnt = check_data(project, table, partition)
    if table_data_cnt == 0:
        log_.info("Upstream table {} not ready for partition {}, waiting...".format(table, partition))
        Timer(60, h_timer_check).start()  # re-check in 60 seconds
    else:
        log_.info("Upstream data ready, count={}, start reading the table".format(table_data_cnt))
        # 2. Read the table and build the features.
        previous_date_str = (datetime.strptime(date, "%Y%m%d") - timedelta(days=1)).strftime("%Y%m%d")
        video_list = get_sql(date, previous_date_str, project)
        # 3. Write to Redis.
        log_.info("Number of videos: {}".format(len(video_list)))
        records_process_for_list(video_list, process_and_store, max_size=50, num_workers=8)
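
# Note: Timer schedules each retry on a separate, non-daemon thread, so the
# "Finished" log below fires as soon as the first check returns, even while
# retries may still be pending.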


if __name__ == '__main__':
    log_.info("Started: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    h_timer_check()
    log_.info("Finished: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

# cd /root/zhangbo/rov-offline
# python alg_recsys_rank_item_realtime_1day.py 20240117 20