1234567891011121314151617181920212223242526272829303132333435363738394041424344454647 |
- # -*- coding: utf-8 -*-
- # @Author: wangkun
- # @Time: 2023/4/20
- import argparse
- import os
- import sys
- sys.path.append(os.getcwd())
- from common.public import task_fun
- from common.common import Common
- from common.scheduling_db import MysqlHelper
- from xiaoniangao.xiaoniangao_author.xiaoniangao_author_scheduling import XiaoniangaoAuthorScheduling
def main(log_type, crawler, task, oss_endpoint, env):
    """Run one round of the Xiaoniangao author ("定向榜") crawl.

    Parses the raw task payload, loads the user list for the task from
    MySQL, logs the schedule/rules/users, and hands everything to
    XiaoniangaoAuthorScheduling.get_follow_videos.

    Args:
        log_type: logger category name passed to Common.logger.
        crawler: crawler name, used for logging and DB lookups.
        task: raw task payload; parsed by task_fun into task/rule dicts.
        oss_endpoint: OSS endpoint forwarded to the scheduler.
        env: environment selector forwarded to DB helper and scheduler.
    """
    # Parse the task payload once and destructure it; the original code
    # invoked task_fun(task) twice, re-parsing the same payload.
    parsed_task = task_fun(task)
    task_dict = parsed_task['task_dict']
    rule_dict = parsed_task['rule_dict']
    task_id = task_dict['task_id']
    # NOTE(review): task_id is interpolated directly into the SQL text.
    # It originates from the internal task payload, but a parameterized
    # query would be safer if MysqlHelper supports it — TODO confirm.
    select_user_sql = f"""select * from crawler_user_v3 where task_id={task_id}"""
    user_list = MysqlHelper.get_values(log_type, crawler, select_user_sql, env, action="")
    Common.logger(log_type, crawler).info(f"调度任务:\n{task_dict}")
    Common.logger(log_type, crawler).info(f"抓取规则:\n{rule_dict}")
    Common.logger(log_type, crawler).info(f"用户列表:\n{user_list}")
    Common.logger(log_type, crawler).info('开始抓取 小年糕 定向榜\n')
    XiaoniangaoAuthorScheduling.get_follow_videos(log_type=log_type,
                                                  crawler=crawler,
                                                  user_list=user_list,
                                                  rule_dict=rule_dict,
                                                  strategy="定向榜爬虫策略",
                                                  oss_endpoint=oss_endpoint,
                                                  env=env)
    # Rotate logs after the round completes, then record completion.
    Common.del_logs(log_type, crawler)
    Common.logger(log_type, crawler).info('抓取完一轮\n')
- if __name__ == "__main__":
- parser = argparse.ArgumentParser() ## 新建参数解释器对象
- parser.add_argument('--log_type', type=str) ## 添加参数,注明参数类型
- parser.add_argument('--crawler') ## 添加参数
- parser.add_argument('--task') ## 添加参数
- parser.add_argument('--oss_endpoint') ## 添加参数
- parser.add_argument('--env') ## 添加参数
- args = parser.parse_args() ### 参数赋值,也可以通过终端赋值
- main(log_type=args.log_type,
- crawler=args.crawler,
- task=args.task,
- oss_endpoint=args.oss_endpoint,
- env=args.env)
|