# run_xiaoniangao_hour.py
# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/3/15
import argparse
import datetime
import os
import sys
sys.path.append(os.getcwd())
from common.common import Common
from xiaoniangao.xiaoniangao_hour.xiaoniangao_hour import XiaoniangaoHour
  11. def main(log_type, crawler, env):
  12. if env == "dev":
  13. oss_endpoint = "out"
  14. else:
  15. oss_endpoint = "inner"
  16. # 获取符合规则的视频,写入小时级数据_feeds
  17. XiaoniangaoHour.get_videoList(log_type, crawler, env)
  18. now = datetime.datetime.now()
  19. if now.hour == 10 and 0 <= now.minute <= 10:
  20. Common.logger(log_type, crawler).info("开始更新/下载上升榜")
  21. XiaoniangaoHour.update_videoList(log_type=log_type,
  22. crawler=crawler,
  23. strategy="小时榜爬虫策略",
  24. oss_endpoint=oss_endpoint,
  25. env=env)
  26. elif now.hour == 15 and now.minute <= 10:
  27. Common.logger(log_type, crawler).info("开始更新/下载上升榜")
  28. XiaoniangaoHour.update_videoList(log_type=log_type,
  29. crawler=crawler,
  30. strategy="小时榜爬虫策略",
  31. oss_endpoint=oss_endpoint,
  32. env=env)
  33. elif now.hour == 20 and now.minute <= 10:
  34. Common.logger(log_type, crawler).info("开始更新/下载上升榜")
  35. XiaoniangaoHour.update_videoList(log_type=log_type,
  36. crawler=crawler,
  37. strategy="小时榜爬虫策略",
  38. oss_endpoint=oss_endpoint,
  39. env=env)
  40. Common.del_logs(log_type, crawler)
  41. if __name__ == "__main__":
  42. parser = argparse.ArgumentParser() ## 新建参数解释器对象
  43. parser.add_argument('--log_type', type=str) ## 添加参数,注明参数类型
  44. parser.add_argument('--crawler') ## 添加参数
  45. parser.add_argument('--env') ## 添加参数
  46. args = parser.parse_args() ### 参数赋值,也可以通过终端赋值
  47. main(log_type=args.log_type,
  48. crawler=args.crawler,
  49. env=args.env)