# run_xiaoniangao_hour_scheduling.py (3.4 KB)
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/3/15
  4. import argparse
  5. import datetime
  6. import os
  7. import random
  8. import sys
  9. sys.path.append(os.getcwd())
  10. from common.scheduling_db import MysqlHelper
  11. from common.common import Common
  12. from common.public import task_fun
  13. from xiaoniangao.xiaoniangao_hour.xiaoniangao_hour_scheduling import XiaoniangaoHourScheduling
  14. def main(log_type, crawler, task, env):
  15. task_dict = task_fun(task)['task_dict']
  16. rule_dict = task_fun(task)['rule_dict']
  17. task_id = task_dict['task_id']
  18. select_user_sql = f"""select * from crawler_user_v3 where task_id={task_id}"""
  19. user_list = MysqlHelper.get_values(log_type, crawler, select_user_sql, env, action="")
  20. our_uid_list = []
  21. for user in user_list:
  22. our_uid_list.append(user["uid"])
  23. our_uid = random.choice(our_uid_list)
  24. Common.logger(log_type, crawler).info(f"调度任务:\n{task_dict}")
  25. Common.logger(log_type, crawler).info(f"抓取规则:\n{rule_dict}")
  26. Common.logger(log_type, crawler).info(f'开始抓取 {task_dict["task_name"]}\n')
  27. # 获取符合规则的视频,写入小时级数据_feeds
  28. for i in range(1, 101):
  29. try:
  30. Common.logger(log_type, crawler).info(f"正在抓取第{i}页")
  31. XiaoniangaoHourScheduling.get_videoList(log_type, crawler, rule_dict, env)
  32. except Exception as e:
  33. Common.logger(log_type, crawler).info(f"抓取第{i}页时异常:{e}\n")
  34. now = datetime.datetime.now()
  35. if now.hour == 10 and 0 <= now.minute <= 10:
  36. Common.logger(log_type, crawler).info("开始更新/下载上升榜")
  37. XiaoniangaoHourScheduling.update_videoList(log_type=log_type,
  38. crawler=crawler,
  39. rule_dict=rule_dict,
  40. our_uid=our_uid,
  41. env=env)
  42. elif now.hour == 15 and now.minute <= 10:
  43. Common.logger(log_type, crawler).info("开始更新/下载上升榜")
  44. XiaoniangaoHourScheduling.update_videoList(log_type=log_type,
  45. crawler=crawler,
  46. rule_dict=rule_dict,
  47. our_uid=our_uid,
  48. env=env)
  49. elif now.hour == 20 and now.minute <= 10:
  50. Common.logger(log_type, crawler).info("开始更新/下载上升榜")
  51. XiaoniangaoHourScheduling.update_videoList(log_type=log_type,
  52. crawler=crawler,
  53. rule_dict=rule_dict,
  54. our_uid=our_uid,
  55. env=env)
  56. Common.del_logs(log_type, crawler)
  57. Common.logger(log_type, crawler).info("抓取完一轮\n")
  58. if __name__ == "__main__":
  59. parser = argparse.ArgumentParser() ## 新建参数解释器对象
  60. parser.add_argument('--log_type', type=str) ## 添加参数,注明参数类型
  61. parser.add_argument('--crawler') ## 添加参数
  62. parser.add_argument('--task') ## 添加参数
  63. parser.add_argument('--env') ## 添加参数
  64. args = parser.parse_args() ### 参数赋值,也可以通过终端赋值
  65. main(log_type=args.log_type,
  66. crawler=args.crawler,
  67. task=args.task,
  68. env=args.env)