run_xiaoniangao_hour_scheduling.py 2.8 KB

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/3/15
import argparse
import ast
import datetime
import os
import sys

sys.path.append(os.getcwd())
from common.common import Common
from xiaoniangao.xiaoniangao_hour.xiaoniangao_hour_scheduling import XiaoniangaoHour


def main(log_type, crawler, task, oss_endpoint, env):
    Common.logger(log_type, crawler).info(f"{type(task)}:{task}")
    # Parse the task string into a dict; ast.literal_eval only accepts Python
    # literals, so it is a safer replacement for the original eval().
    task = ast.literal_eval(task)
    Common.logger(log_type, crawler).info(f"{type(task)}\n")
    Common.logger(log_type, crawler).info(f"{task}\n")
    Common.logger(log_type, crawler).info(f"{oss_endpoint}")
    Common.logger(log_type, crawler).info(f"{env}")
    # Fetch videos that match the rules and write them into the hourly feed
    # table (小时级数据_feeds)
    # XiaoniangaoHour.get_videoList(log_type, crawler, env)
    # now = datetime.datetime.now()
    # if now.hour == 10 and 0 <= now.minute <= 10:
    #     Common.logger(log_type, crawler).info("开始更新/下载上升榜")
    #     XiaoniangaoHour.update_videoList(log_type=log_type,
    #                                      crawler=crawler,
    #                                      strategy="小时榜爬虫策略",
    #                                      oss_endpoint=oss_endpoint,
    #                                      env=env)
    #
    # elif now.hour == 15 and now.minute <= 10:
    #     Common.logger(log_type, crawler).info("开始更新/下载上升榜")
    #     XiaoniangaoHour.update_videoList(log_type=log_type,
    #                                      crawler=crawler,
    #                                      strategy="小时榜爬虫策略",
    #                                      oss_endpoint=oss_endpoint,
    #                                      env=env)
    #
    # elif now.hour == 20 and now.minute <= 10:
    #     Common.logger(log_type, crawler).info("开始更新/下载上升榜")
    #     XiaoniangaoHour.update_videoList(log_type=log_type,
    #                                      crawler=crawler,
    #                                      strategy="小时榜爬虫策略",
    #                                      oss_endpoint=oss_endpoint,
    #                                      env=env)
    # Common.del_logs(log_type, crawler)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()  # create the argument parser
    parser.add_argument('--log_type', type=str)  # add an argument, declaring its type
    parser.add_argument('--crawler')  # add an argument
    parser.add_argument('--task')  # add an argument
    parser.add_argument('--oss_endpoint')  # add an argument
    parser.add_argument('--env')  # add an argument
    args = parser.parse_args()  # bind values; they can also be passed from the terminal
    main(log_type=args.log_type,
         crawler=args.crawler,
         task=args.task,
         oss_endpoint=args.oss_endpoint,
         env=args.env)
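
# Example invocation (a sketch; the flag values below are illustrative
# placeholders, not values confirmed by this repo; --task must be a Python
# dict literal because it is parsed with ast.literal_eval):
#   python run_xiaoniangao_hour_scheduling.py \
#       --log_type="hour" \
#       --crawler="xiaoniangao" \
#       --task="{'taskName': 'xiaoniangao_hour'}" \
#       --oss_endpoint="inner" \
#       --env="dev"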