run_youtube_follow_scheduling.py

# -*- coding: utf-8 -*-
# @Author: lierqiang
# @Time: 2023/3/15
import argparse
import os
import sys

# Make the repository root importable; assumes the script is launched from the repo root.
sys.path.append(os.getcwd())

from common.common import Common
from common.public import task_fun
from youtube.youtube_follow.youtube_follow_scheduling import YoutubeAuthorScheduling


def main(log_type, crawler, task, oss_endpoint, env):
    # Normalize the raw task payload before handing it to the crawler.
    task = task_fun(task)
    Common.logger(log_type, crawler).info(f"{task}\n")
    try:
        Common.logger(log_type, crawler).info('Start crawling the YouTube targeted-author list\n')
        YoutubeAuthorScheduling.get_follow_videos(
            log_type=log_type,
            crawler=crawler,
            task=task,
            oss_endpoint=oss_endpoint,
            env=env,
        )
        # Clean up old log files once the crawl finishes.
        Common.del_logs(log_type, crawler)
        Common.logger(log_type, crawler).info('Crawl task finished\n')
    except Exception as e:
        Common.logger(log_type, crawler).info(f"YouTube crawl exception, alert triggered: {e}\n")


if __name__ == "__main__":
    parser = argparse.ArgumentParser()  # build the command-line parser
    parser.add_argument('--log_type', default='author', type=str)
    parser.add_argument('--crawler', default='youtube')
    parser.add_argument('--strategy', default='定向')  # "定向" = targeted-author strategy
    parser.add_argument('--task', default='')  # overridden by the hard-coded sample task below
    parser.add_argument('--oss_endpoint', default='out')
    parser.add_argument('--env', default='dev')
    # parser.add_argument('--machine')
    args = parser.parse_args()  # values can also be overridden from the command line
    # Hard-coded sample task (Douyin/Xigua test data) used in place of the --task argument.
    task = {
        'task_dict': {'task_id': '17', 'task_name': '西瓜测试4.21', 'source': 'douyin',
                      'start_time': '1682010720000', 'interval': '24', 'mode': 'author',
                      'rule': {'duration': {'min': 40, 'max': 0}, 'play_cnt': {'min': 4000, 'max': 0},
                               'period': {'min': 10, 'max': 0}, 'fans_cnt': {'min': 0, 'max': 0},
                               'videos_cnt': {'min': 0, 'max': 0}, 'like_cnt': {'min': 0, 'max': 0},
                               'width': {'min': 0, 'max': 0}, 'height': {'min': 0, 'max': 0}},
                      'spider_name': 'run_dy_author_scheduling', 'machine': 'aliyun', 'status': '0',
                      'create_time': '1682048632396', 'update_time': '1682048632396', 'operator': ''},
        'rule_dict': {'duration': {'min': 0, 'max': 0}, 'play_cnt': {'min': 0, 'max': 0},
                      'period': {'min': 0, 'max': 0}, 'fans_cnt': {'min': 0, 'max': 0},
                      'videos_cnt': {'min': 0, 'max': 0}, 'like_cnt': {'min': 0, 'max': 0},
                      'width': {'min': 0, 'max': 0}, 'height': {'min': 0, 'max': 0}},
    }
    main(log_type=args.log_type,
         crawler=args.crawler,
         task=task,
         oss_endpoint=args.oss_endpoint,
         env=args.env,
         )
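
# Usage sketch (an assumption, not part of the original file): run from the repository
# root so that sys.path.append(os.getcwd()) can resolve the `common` and `youtube`
# packages; the flags are the ones defined by the argparse block above.
#
#   python <path-to>/run_youtube_follow_scheduling.py --log_type author \
#       --crawler youtube --oss_endpoint out --env dev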