# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/3/15
"""CLI entry point for the Xiaoniangao hour-ranking crawler scheduler."""
import argparse
import ast
import datetime
import os
import sys

sys.path.append(os.getcwd())
from common.common import Common
from xiaoniangao.xiaoniangao_hour.xiaoniangao_hour_scheduling import XiaoniangaoHour


def main(log_type, crawler, task, oss_endpoint, env):
    """Parse the task string and run the hourly crawl for one scheduler tick.

    :param log_type: log category name passed to Common.logger
    :param crawler: crawler name passed to Common.logger
    :param task: task definition as a Python-literal string (e.g. a dict repr)
    :param oss_endpoint: OSS endpoint forwarded to the download step
    :param env: runtime environment flag (e.g. dev / prod) — semantics defined
        by XiaoniangaoHour; not interpreted here
    """
    Common.logger(log_type, crawler).info(f"{type(task)}:{task}")
    # SECURITY FIX: the original code used eval(task), which executes an
    # arbitrary expression supplied on the command line. ast.literal_eval
    # only accepts Python literals (dict/list/str/num/bool/None), which is
    # all a task definition should contain, and cannot run code.
    task = ast.literal_eval(task)
    Common.logger(log_type, crawler).info(f"{type(task)}\n")
    Common.logger(log_type, crawler).info(f"{task}\n")
    Common.logger(log_type, crawler).info(f"{oss_endpoint}")
    Common.logger(log_type, crawler).info(f"{env}")
    # Fetch rule-matching videos and write them into the hourly feeds table.
    # (Currently disabled — kept for reference until the schedule is re-enabled.)
    # XiaoniangaoHour.get_videoList(log_type, crawler, env)
    # now = datetime.datetime.now()
    # if now.hour == 10 and 0 <= now.minute <= 10:
    #     Common.logger(log_type, crawler).info("开始更新/下载上升榜")
    #     XiaoniangaoHour.update_videoList(log_type=log_type,
    #                                      crawler=crawler,
    #                                      strategy="小时榜爬虫策略",
    #                                      oss_endpoint=oss_endpoint,
    #                                      env=env)
    #
    # elif now.hour == 15 and now.minute <= 10:
    #     Common.logger(log_type, crawler).info("开始更新/下载上升榜")
    #     XiaoniangaoHour.update_videoList(log_type=log_type,
    #                                      crawler=crawler,
    #                                      strategy="小时榜爬虫策略",
    #                                      oss_endpoint=oss_endpoint,
    #                                      env=env)
    #
    # elif now.hour == 20 and now.minute <= 10:
    #     Common.logger(log_type, crawler).info("开始更新/下载上升榜")
    #     XiaoniangaoHour.update_videoList(log_type=log_type,
    #                                      crawler=crawler,
    #                                      strategy="小时榜爬虫策略",
    #                                      oss_endpoint=oss_endpoint,
    #                                      env=env)
    # Common.del_logs(log_type, crawler)


if __name__ == "__main__":
    # Build the argument parser; values may also be supplied from a terminal.
    parser = argparse.ArgumentParser()
    parser.add_argument('--log_type', type=str, help='log category name')
    parser.add_argument('--crawler', help='crawler name')
    parser.add_argument('--task', help='task definition (Python-literal string)')
    parser.add_argument('--oss_endpoint', help='OSS endpoint for downloads')
    parser.add_argument('--env', help='runtime environment, e.g. dev/prod')
    args = parser.parse_args()
    main(log_type=args.log_type,
         crawler=args.crawler,
         task=args.task,
         oss_endpoint=args.oss_endpoint,
         env=args.env)