run_shipinhao_search_scheduling.py

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/5/5
import argparse
import os
import sys

# Make the repo root importable so the common.* and shipinhao.* packages
# resolve when the script is launched from the repository's working directory.
sys.path.append(os.getcwd())
from common.public import task_fun
from common.common import Common
from common.scheduling_db import MysqlHelper
from shipinhao.shipinhao_search.shipinhao_search_scheduling import ShipinhaoSearchScheduling


def main(log_type, crawler, task, oss_endpoint, env):
    # Parse the task payload once and reuse the result, instead of calling
    # task_fun(task) twice as in the original.
    parsed = task_fun(task)
    task_dict = parsed['task_dict']
    rule_dict = parsed['rule_dict']
    task_id = task_dict['task_id']
    select_user_sql = f"""select * from crawler_user_v3 where task_id={task_id}"""
    user_list = MysqlHelper.get_values(log_type, crawler, select_user_sql, env, action="")
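    # Note: task_id is interpolated straight into the SQL string; it comes from
    # the scheduler's own task_dict rather than end-user input, but a
    # parameterized query would be safer if MysqlHelper.get_values supports
    # placeholders (an assumption; its signature is not shown in this file).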
    Common.logger(log_type, crawler).info(f"Scheduled task:\n{task_dict}")
    Common.logger(log_type, crawler).info(f"Crawl rules:\n{rule_dict}")
    Common.logger(log_type, crawler).info(f"User list:\n{user_list}")
    Common.logger(log_type, crawler).info('Start crawling: Shipinhao search strategy\n')
    ShipinhaoSearchScheduling.get_search_videos(log_type=log_type,
                                                crawler=crawler,
                                                rule_dict=rule_dict,
                                                oss_endpoint=oss_endpoint,
                                                env=env)
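    # As written, user_list is fetched and logged but never forwarded to
    # get_search_videos; whether the search strategy reads it elsewhere is
    # not visible from this file.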
    Common.del_logs(log_type, crawler)
    Common.logger(log_type, crawler).info('Finished one crawl round\n')


if __name__ == "__main__":
    parser = argparse.ArgumentParser()  # create the argument parser
    parser.add_argument('--log_type', type=str)  # declare the argument's type explicitly
    parser.add_argument('--crawler')
    parser.add_argument('--task')
    parser.add_argument('--oss_endpoint')
    parser.add_argument('--env')
    args = parser.parse_args()  # argument values are supplied from the terminal
    main(log_type=args.log_type,
         crawler=args.crawler,
         task=args.task,
         oss_endpoint=args.oss_endpoint,
         env=args.env)