# token_api.py
  1. import time
  2. from gzh_spider.api import MysqlHelper, Feishu
  3. class TokenApi:
  4. @classmethod
  5. def get_token(cls, log_type, crawler, token_index, env):
  6. select_sql = f""" select * from crawler_config where source="{crawler}" and title LIKE "%公众号_{token_index}%";"""
  7. configs = MysqlHelper.get_values(log_type, crawler, select_sql, env, action="")
  8. if len(configs) == 0:
  9. Feishu.bot(log_type, crawler, f"公众号_{token_index}:未配置token")
  10. time.sleep(60)
  11. return None
  12. token_dict = {
  13. "token_id": configs[0]["id"],
  14. "title": configs[0]["title"].strip(),
  15. "token": dict(eval(configs[0]["config"]))["token"].strip(),
  16. "cookie": dict(eval(configs[0]["config"]))["cookie"].strip(),
  17. "update_time": time.strftime(
  18. "%Y-%m-%d %H:%M:%S",
  19. time.localtime(int(configs[0]["update_time"] / 1000)),
  20. ),
  21. "operator": configs[0]["operator"].strip(),
  22. }
  23. # for k, v in token_dict.items():
  24. # print(f"{k}:{type(v)}, {v}")
  25. return token_dict