alg_ad_feature_02_vidcid2action_redis.py 5.9 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190
# -*- coding: utf-8 -*-
import os
import sys

# Make the repository root importable so the sibling project modules
# (my_utils, my_config, log) resolve when this script is run directly.
root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
if root_dir not in sys.path:
    sys.path.append(root_dir)
print("******** sys.path ********")
print(sys.path)

from odps import ODPS
from threading import Timer
import threading
from my_utils import RedisHelper, execute_sql_from_odps
from my_config import set_config
from log import Log
import json
from datetime import datetime
from queue import Queue
from tqdm import tqdm

# Module-level singletons shared by every function below.
config_, _ = set_config()
log_ = Log()
redis_helper = RedisHelper()

# Prefix for every Redis key written by this job.
REDIS_PREFIX = "redis:vid_cid_action:"
# TTL applied to each key: 6 hours, in seconds.
EXPIRE_TIME = 6 * 3600
  24. def worker(queue, executor):
  25. while True:
  26. row = queue.get()
  27. if row is None: # 结束信号
  28. queue.task_done()
  29. break
  30. executor(row)
  31. queue.task_done()
  32. def records_process_for_list(records, executor, max_size=50, num_workers=10):
  33. # 创建一个线程安全的队列
  34. queue = Queue(maxsize=max_size) # 可以调整 maxsize 以控制内存使用
  35. # 设置线程池大小
  36. num_workers = num_workers
  37. # 启动工作线程
  38. threads = []
  39. for _ in range(num_workers):
  40. t = threading.Thread(target=worker, args=(queue, executor))
  41. t.start()
  42. threads.append(t)
  43. # 读取数据并放入队列
  44. for row in tqdm(records):
  45. queue.put(row)
  46. # 发送结束信号
  47. for _ in range(num_workers):
  48. queue.put(None)
  49. # 等待所有任务完成
  50. queue.join()
  51. # 等待所有工作线程结束
  52. for t in threads:
  53. t.join()
  54. def process_and_store(row):
  55. table_key, json_str = row
  56. key = REDIS_PREFIX + str(table_key)
  57. expire_time = EXPIRE_TIME
  58. redis_helper.set_data_to_redis(key, json_str, expire_time)
  59. def check_data(project, table, date, hour, mm) -> int:
  60. """检查数据是否准备好,输出数据条数"""
  61. odps = ODPS(
  62. access_id=config_.ODPS_CONFIG['ACCESSID'],
  63. secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
  64. project=project,
  65. endpoint=config_.ODPS_CONFIG['ENDPOINT'],
  66. connect_timeout=3000,
  67. read_timeout=500000,
  68. pool_maxsize=1000,
  69. pool_connections=1000
  70. )
  71. try:
  72. t = odps.get_table(name=table)
  73. log_.info(f"检查分区是否存在-【 dt={date} hh={hour}】")
  74. check_res = t.exist_partition(partition_spec=f'dt={date},hh={hour},mm={mm}')
  75. if check_res:
  76. sql = f'select * from {project}.{table} where dt = {date} and hh = {hour} and mm = {mm}'
  77. log_.info(sql)
  78. with odps.execute_sql(sql=sql).open_reader() as reader:
  79. data_count = reader.count
  80. else:
  81. log_.info("表{}分区{}/{}不存在".format(table, date, hour))
  82. data_count = 0
  83. except Exception as e:
  84. log_.error("table:{},date:{},hour:{} no data. return data_count=0,报错原因是:{}".format(table, date, hour, e))
  85. data_count = 0
  86. return data_count
  87. def get_sql(project, table, date, hour, mm):
  88. sql = '''
  89. SELECT vid, cid
  90. ,exp
  91. ,click
  92. ,order
  93. ,cpa
  94. FROM {}.{}
  95. WHERE dt = '{}'
  96. and hh = '{}'
  97. and mm = '{}'
  98. '''.format(
  99. project, table, date, hour, mm
  100. )
  101. print("sql:" + sql)
  102. records = execute_sql_from_odps(project=project, sql=sql)
  103. video_list = []
  104. with records.open_reader() as reader:
  105. for record in reader:
  106. key1 = record['vid']
  107. key2 = record['cid']
  108. key = key1+"_"+key2
  109. m = dict()
  110. try:
  111. m["exp"] = record['exp']
  112. except Exception as e:
  113. log_.error(e)
  114. try:
  115. m["click"] = record['click']
  116. except Exception as e:
  117. log_.error(e)
  118. try:
  119. m["order"] = record['order']
  120. except Exception as e:
  121. log_.error(e)
  122. try:
  123. m["cpa"] = record['cpa']
  124. except Exception as e:
  125. log_.error(e)
  126. json_str = json.dumps(m)
  127. video_list.append([key, json_str])
  128. return video_list
  129. def main():
  130. try:
  131. date = sys.argv[1]
  132. hour = sys.argv[2]
  133. # mm = sys.argv[3]
  134. mm = "00"
  135. except Exception as e:
  136. date = datetime.now().strftime('%Y%m%d')
  137. hour = datetime.now().hour
  138. # mm = datetime.now().minute
  139. mm = "00"
  140. log_.info("没有读取到参数,采用系统时间:{}".format(e))
  141. log_.info("使用时间参数-日期:{},小时:{}".format(date, str(hour)))
  142. if hour in ["00", "01"]:
  143. log_.info(f"hour={hour}不执行,直接返回。")
  144. return
  145. # 1 判断上游数据表是否生产完成
  146. project = "loghubods"
  147. table = "alg_ad_feature_vidcid_action"
  148. table_data_cnt = check_data(project, table, date, hour, mm)
  149. if table_data_cnt == 0:
  150. log_.info("上游数据{}未就绪{}/{},等待...".format(table, date, hour))
  151. Timer(60, main).start()
  152. else:
  153. log_.info("上游数据就绪,count={},开始读取数据表".format(table_data_cnt))
  154. # 2 读取数据表 处理特征
  155. video_list = get_sql(project, table, date, hour, mm)
  156. # 3 写入redis
  157. log_.info("video的数据量:{}".format(len(video_list)))
  158. records_process_for_list(video_list, process_and_store, max_size=50, num_workers=8)
# Script entry point: log start/finish timestamps around one pipeline run.
if __name__ == '__main__':
    log_.info("开始执行:" + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    main()
    log_.info("完成执行:" + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
  163. # cd /root/zhangbo/rov-offline
  164. # python alg_ad_feature_02_vidcid2action_redis.py 20240523 19 00
  165. """
  166. !!!!!!!!!!!!!!
  167. 更改字段:table 表名
  168. REDIS_PREFIX redis的key
  169. EXPIRE_TIME redis的过期时间
  170. sql 各种字段
  171. record 各种字段
  172. if hour in ["00"]: 哪些小时不执行
  173. """