import time

from gzh_spider.api import MysqlHelper, Feishu


class TokenApi:
    @classmethod
    def get_token(cls, log_type, crawler, token_index, env):
        # Look up the crawler_config row for this official-account (公众号) token index.
        select_sql = f""" select * from crawler_config where source="{crawler}" and title LIKE "%公众号_{token_index}%";"""
        configs = MysqlHelper.get_values(log_type, crawler, select_sql, env, action="")
        if len(configs) == 0:
            # No config row found: alert via Feishu ("公众号_{token_index}: token not configured"),
            # back off for a minute, and signal failure to the caller.
            Feishu.bot(log_type, crawler, f"公众号_{token_index}:未配置token")
            time.sleep(60)
            return None
        # The "config" column stores a dict literal; evaluate it once and reuse it.
        config = dict(eval(configs[0]["config"]))
        token_dict = {
            "token_id": configs[0]["id"],
            "title": configs[0]["title"].strip(),
            "token": config["token"].strip(),
            "cookie": config["cookie"].strip(),
            # update_time is stored in milliseconds; format it as a local timestamp string.
            "update_time": time.strftime(
                "%Y-%m-%d %H:%M:%S",
                time.localtime(int(configs[0]["update_time"] / 1000)),
            ),
            "operator": configs[0]["operator"].strip(),
        }
        # for k, v in token_dict.items():
        #     print(f"{k}:{type(v)}, {v}")
        return token_dict
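

# Minimal usage sketch under stated assumptions: the log_type, crawler, and env
# values below are illustrative placeholders, not values confirmed by this repo.
if __name__ == "__main__":
    token_dict = TokenApi.get_token(
        log_type="author",       # assumed log channel name
        crawler="gongzhonghao",  # assumed "source" key in crawler_config
        token_index=1,
        env="dev",               # assumed environment flag
    )
    print(token_dict)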