# run_xiaoniangao_hour.py
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/3/15
  4. import argparse
  5. import datetime
  6. import os
  7. import sys
  8. sys.path.append(os.getcwd())
  9. from common.common import Common
  10. from xiaoniangao.xiaoniangao_hour.xiaoniangao_hour import XiaoniangaoHour
  11. def main(log_type, crawler, env):
  12. while True:
  13. if env == "dev":
  14. oss_endpoint = "out"
  15. else:
  16. oss_endpoint = "inner"
  17. # 获取符合规则的视频,写入小时级数据_feeds
  18. XiaoniangaoHour.get_videoList(log_type, crawler, env)
  19. now = datetime.datetime.now()
  20. if now.hour == 10 and 0 <= now.minute <= 10:
  21. Common.logger(log_type, crawler).info("开始更新/下载上升榜")
  22. XiaoniangaoHour.update_videoList(log_type=log_type,
  23. crawler=crawler,
  24. strategy="小时榜爬虫策略",
  25. oss_endpoint=oss_endpoint,
  26. env=env)
  27. elif now.hour == 15 and now.minute <= 10:
  28. Common.logger(log_type, crawler).info("开始更新/下载上升榜")
  29. XiaoniangaoHour.update_videoList(log_type=log_type,
  30. crawler=crawler,
  31. strategy="小时榜爬虫策略",
  32. oss_endpoint=oss_endpoint,
  33. env=env)
  34. elif now.hour == 20 and now.minute <= 10:
  35. Common.logger(log_type, crawler).info("开始更新/下载上升榜")
  36. XiaoniangaoHour.update_videoList(log_type=log_type,
  37. crawler=crawler,
  38. strategy="小时榜爬虫策略",
  39. oss_endpoint=oss_endpoint,
  40. env=env)
  41. Common.del_logs(log_type, crawler)
  42. if __name__ == "__main__":
  43. parser = argparse.ArgumentParser() ## 新建参数解释器对象
  44. parser.add_argument('--log_type', type=str) ## 添加参数,注明参数类型
  45. parser.add_argument('--crawler') ## 添加参数
  46. parser.add_argument('--env') ## 添加参数
  47. args = parser.parse_args() ### 参数赋值,也可以通过终端赋值
  48. main(log_type=args.log_type,
  49. crawler=args.crawler,
  50. env=args.env)