run_xiaoniangao_follow.py

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/3/13
import argparse
import os
import sys
import time
sys.path.append(os.getcwd())
from common.common import Common
from xiaoniangao.xiaoniangao_follow.xiaoniangao_follow import XiaoniangaoFollow


def main(log_type, crawler, env):
    while True:
        try:
            # choose the OSS endpoint: external ("out") for dev, internal otherwise
            if env == "dev":
                oss_endpoint = "out"
            else:
                oss_endpoint = "inner"
            Common.logger(log_type, crawler).info('开始抓取 小年糕 定向榜\n')
            # crawl one round of the Xiaoniangao follow (定向) list
            XiaoniangaoFollow.get_follow_videos(log_type=log_type,
                                                crawler=crawler,
                                                strategy="定向爬虫策略",
                                                oss_endpoint=oss_endpoint,
                                                env=env)
            # clean up old logs, then sleep 1 minute before the next round
            Common.del_logs(log_type, crawler)
            Common.logger(log_type, crawler).info('抓取完一轮,休眠 1 分钟\n')
            time.sleep(60)
        except Exception as e:
            Common.logger(log_type, crawler).info(f"小年糕定向抓取异常:{e}\n")


if __name__ == "__main__":
    parser = argparse.ArgumentParser()  # create the command-line argument parser
    parser.add_argument('--log_type', type=str)  # add an argument and declare its type
    parser.add_argument('--crawler')  # add an argument
    parser.add_argument('--env')  # add an argument
    args = parser.parse_args()  # parse the arguments; values can also be supplied from the terminal
    main(log_type=args.log_type,
         crawler=args.crawler,
         env=args.env)
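Usage note: the script is configured entirely by the three flags defined above and should be run from the repository root so that sys.path.append(os.getcwd()) can resolve the common and xiaoniangao packages. A minimal sketch of an invocation follows; the flag values "follow" and "xiaoniangao" are illustrative assumptions, and only the "dev"/non-"dev" distinction for --env is visible in the code:

python run_xiaoniangao_follow.py --log_type="follow" --crawler="xiaoniangao" --env="dev"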