run_kuaishou_recommend_scheduling.py

# -*- coding: utf-8 -*-
# @Author: lierqiang
# @Time: 2023/4/21
import argparse
import os
import sys

sys.path.append(os.getcwd())

from common.common import Common
from kuaishou.kuaishou_recommend.kuaishou_recommend_shceduling import KuaiShouRecommendScheduling
from common.public import task_fun


def main(log_type, crawler, task, oss_endpoint, env):
    """Run one Kuaishou recommend-feed crawl for the given task, then clean up logs."""
    task = task_fun(task)
    try:
        Common.logger(log_type, crawler).info(f'开始抓取 {crawler}视频 推荐榜\n')  # "start crawling {crawler} recommend feed"
        KuaiShouRecommendScheduling.get_recommend_videos(log_type=log_type,
                                                         crawler=crawler,
                                                         task=task,
                                                         oss_endpoint=oss_endpoint,
                                                         env=env)
        Common.del_logs(log_type, crawler)
        Common.logger(log_type, crawler).info('抓取任务结束\n')  # "crawl task finished"
    except Exception as e:
        Common.logger(log_type, crawler).info(f"{crawler}视频异常,触发报警:{e}\n")  # "{crawler} video exception, alert triggered"
        # Feishu.bot(log_type, crawler, f"{e}")


if __name__ == "__main__":
    parser = argparse.ArgumentParser()  # build the CLI argument parser
    parser.add_argument('--log_type', default='author')     # log category
    parser.add_argument('--crawler', default='kuaishou')     # crawler / platform name
    parser.add_argument('--strategy', default='定向抓取')     # crawl strategy ("targeted crawl")
    parser.add_argument('--task')                            # task payload (see note below)
    parser.add_argument('--oss_endpoint', default='outer')   # OSS endpoint
    parser.add_argument('--env', default='dev')              # runtime environment
    # parser.add_argument('--machine')
    args = parser.parse_args()  # values can also be supplied on the command line
    # Hard-coded task for testing. The --task argument parsed above is not used;
    # this dict is what actually gets passed to main().
    task = {
        'task_dict': {'task_id': '17', 'task_name': '西瓜测试4.21', 'source': 'kuaishou',
                      'start_time': '1682010720000', 'interval': '24', 'mode': 'author',
                      'rule': {'duration': {'min': 40, 'max': 0}, 'play_cnt': {'min': 4000, 'max': 0},
                               'period': {'min': 10, 'max': 0}, 'fans_cnt': {'min': 0, 'max': 0},
                               'videos_cnt': {'min': 0, 'max': 0}, 'like_cnt': {'min': 0, 'max': 0},
                               'width': {'min': 0, 'max': 0}, 'height': {'min': 0, 'max': 0}},
                      'spider_name': 'run_dy_author_scheduling', 'machine': 'aliyun', 'status': '0',
                      'create_time': '1682048632396', 'update_time': '1682048632396', 'operator': ''},
        'rule_dict': {'duration': {'min': 0, 'max': 0}, 'play_cnt': {'min': 0, 'max': 0},
                      'period': {'min': 0, 'max': 0}, 'fans_cnt': {'min': 0, 'max': 0},
                      'videos_cnt': {'min': 0, 'max': 0}, 'like_cnt': {'min': 0, 'max': 0},
                      'width': {'min': 0, 'max': 0}, 'height': {'min': 0, 'max': 0},
                      'publish_time': {'min': 0}}}
    main(log_type=args.log_type,
         crawler=args.crawler,
         task=task,
         oss_endpoint=args.oss_endpoint,
         env=args.env)
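
# Example invocation (an illustrative sketch, not taken from the repo's docs): run
# from the repository root so that sys.path.append(os.getcwd()) can resolve the
# common/ and kuaishou/ packages. "path/to" is a placeholder for wherever this
# script lives in the repo; the flags match the argparse definitions above, and the
# values shown are assumptions for a dev run.
#
#   python path/to/run_kuaishou_recommend_scheduling.py \
#       --log_type recommend --crawler kuaishou --oss_endpoint outer --env dev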