run_suisuiniannianyingfuqi_recommend_scheduling.py 2.2 KB

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/4/13
import argparse
import os
import random
import sys
sys.path.append(os.getcwd())
from common.public import task_fun
from common.common import Common
from common.scheduling_db import MysqlHelper
from suisuiniannianyingfuqi.suisuiniannianyingfuqi_recommend.suisuiniannianyingfuqi_recommend_scheduling import SuisuiniannianyingfuqiRecommendScheduling
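

# Entry point for one scheduled crawl round of the "岁岁年年迎福气" (suisuiniannianyingfuqi)
# mini-program recommend feed: parse the task, pick a random bound user, then hand off
# to SuisuiniannianyingfuqiRecommendScheduling.get_videoList.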
def main(log_type, crawler, task, env):
    # Parse the raw task once and reuse the result instead of calling task_fun twice.
    task_parsed = task_fun(task)
    task_dict = task_parsed['task_dict']
    rule_dict = task_parsed['rule_dict']
    task_id = task_dict['task_id']
    select_user_sql = f"""select * from crawler_user_v3 where task_id={task_id}"""
    user_list = MysqlHelper.get_values(log_type, crawler, select_user_sql, env, action="")
    # Guard against an empty user list: random.choice on an empty list raises IndexError.
    if not user_list:
        Common.logger(log_type, crawler).info(f"task_id={task_id} 未绑定用户,跳过本轮抓取\n")
        return
    our_uid_list = [user["uid"] for user in user_list]
    our_uid = random.choice(our_uid_list)
    Common.logger(log_type, crawler).info(f"调度任务:\n{task_dict}")
    Common.logger(log_type, crawler).info(f"抓取规则:\n{rule_dict}")
    Common.logger(log_type, crawler).info(f"用户列表:\n{user_list}")
    Common.logger(log_type, crawler).info('开始抓取 岁岁年年迎福气小程序\n')
    SuisuiniannianyingfuqiRecommendScheduling.get_videoList(log_type=log_type,
                                                            crawler=crawler,
                                                            our_uid=our_uid,
                                                            rule_dict=rule_dict,
                                                            env=env)
    Common.del_logs(log_type, crawler)
    Common.logger(log_type, crawler).info('抓取完一轮\n')


if __name__ == "__main__":
    parser = argparse.ArgumentParser()  # Create the argument parser
    parser.add_argument('--log_type', type=str)  # Add argument, declaring its type
    parser.add_argument('--crawler')  # Add argument
    parser.add_argument('--task')  # Add argument
    # parser.add_argument('--oss_endpoint')  # Add argument
    parser.add_argument('--env')  # Add argument
    args = parser.parse_args()  # Assign argument values; they can also be passed from the terminal
    main(log_type=args.log_type,
         crawler=args.crawler,
         task=args.task,
         env=args.env)
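
For reference, a minimal sketch of how this entry point might be launched. The script path, the flag values, and the shape of the --task payload consumed by task_fun are all assumptions, not confirmed by this file:

    python ./suisuiniannianyingfuqi/run_suisuiniannianyingfuqi_recommend_scheduling.py \
        --log_type="recommend" \
        --crawler="suisuiniannianyingfuqi" \
        --task='<JSON task record from the task platform>' \
        --env="dev"

Since main() performs a single crawl pass and then exits, a run_* script like this is presumably fired repeatedly by an external scheduler (for example a crontab entry or a task platform), one invocation per round.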