# run_xiaoniangao_author_scheduling.py
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/4/20
  4. import argparse
  5. import os
  6. import sys
  7. sys.path.append(os.getcwd())
  8. from common.public import task_fun
  9. from common.common import Common
  10. from common.scheduling_db import MysqlHelper
  11. from xiaoniangao.xiaoniangao_author.xiaoniangao_author_scheduling import XiaoniangaoAuthorScheduling
  12. def main(log_type, crawler, task, oss_endpoint, env):
  13. task_dict = task_fun(task)['task_dict']
  14. rule_dict = task_fun(task)['rule_dict']
  15. task_id = task_dict['task_id']
  16. select_user_sql = f"""select * from crawler_user_v3 where task_id={task_id}"""
  17. user_list = MysqlHelper.get_values(log_type, crawler, select_user_sql, env, action="")
  18. Common.logger(log_type, crawler).info(f"调度任务:\n{task_dict}")
  19. Common.logger(log_type, crawler).info(f"抓取规则:\n{rule_dict}")
  20. Common.logger(log_type, crawler).info(f"用户列表:\n{user_list}")
  21. Common.logger(log_type, crawler).info('开始抓取 小年糕 定向榜\n')
  22. XiaoniangaoAuthorScheduling.get_follow_videos(log_type=log_type,
  23. crawler=crawler,
  24. user_list=user_list,
  25. rule_dict=rule_dict,
  26. strategy="定向榜爬虫策略",
  27. oss_endpoint=oss_endpoint,
  28. env=env)
  29. Common.del_logs(log_type, crawler)
  30. Common.logger(log_type, crawler).info('抓取完一轮\n')
  31. if __name__ == "__main__":
  32. parser = argparse.ArgumentParser() ## 新建参数解释器对象
  33. parser.add_argument('--log_type', type=str) ## 添加参数,注明参数类型
  34. parser.add_argument('--crawler') ## 添加参数
  35. parser.add_argument('--task') ## 添加参数
  36. parser.add_argument('--oss_endpoint') ## 添加参数
  37. parser.add_argument('--env') ## 添加参数
  38. args = parser.parse_args() ### 参数赋值,也可以通过终端赋值
  39. main(log_type=args.log_type,
  40. crawler=args.crawler,
  41. task=args.task,
  42. oss_endpoint=args.oss_endpoint,
  43. env=args.env)