@@ -12,77 +12,6 @@ from main.feishu_lib import Feishu


class Bot:
-    # # 获取各个爬虫的 feeds 表
-    # @classmethod
-    # def get_feeds_sheet(cls, log_type, crawler, sheet):
-    #     try:
-    #         if crawler == "kanyikan" and sheet == "recommend":
-    #             feeds_sheet = Feishu.get_values_batch(log_type, "kanyikan", "SdCHOM")
-    #         elif crawler == "kanyikan" and sheet == "moment":
-    #             feeds_sheet = Feishu.get_values_batch(log_type, "kanyikan", "tGqZMX")
-    #         elif crawler == "xiaoniangao" and sheet == "hour":
-    #             feeds_sheet = Feishu.get_values_batch(log_type, "xiaoniangao", "ba0da4")
-    #         elif crawler == "xiaoniangao" and sheet == "person":
-    #             feeds_sheet = Feishu.get_values_batch(log_type, "xiaoniangao", "k6ldje")
-    #         elif crawler == "music_album" and sheet == "recommend":
-    #             feeds_sheet = Feishu.get_values_batch(log_type, "music_album", "69UxPo")
-    #         elif crawler == "bszf" and sheet == "recommend":
-    #             feeds_sheet = Feishu.get_values_batch(log_type, "bszf", "CcHgO7")
-    #         elif crawler == "kuaishou" and sheet == "recommend":
-    #             feeds_sheet = Feishu.get_values_batch(log_type, "kuaishou", "JK6npf")
-    #         elif crawler == "kuaishou" and sheet == "follow":
-    #             feeds_sheet = Feishu.get_values_batch(log_type, "kuaishou", "wW5cyb")
-    #         elif crawler == "gzh" and sheet == "recommend":
-    #             feeds_sheet = Feishu.get_values_batch(log_type, "gzh", "zWKFGb")
-    #         elif crawler == "weishi" and sheet == "recommend":
-    #             feeds_sheet = Feishu.get_values_batch(log_type, "weishi", "O7fCzr")
-    #         else:
-    #             feeds_sheet = "请输入{crawler}和{sheet}"
-    #
-    #         return feeds_sheet
-    #     except Exception as e:
-    #         Common.logger(log_type).error("get_feeds_sheet异常:{}", e)
-    #
-    # # feeds_sheet表报警:连续 2 小时无数据
-    # @classmethod
-    # def rebot_feeds_sheet(cls, log_type, crawler, sheet):
-    #     """
-    #     每隔一分钟获取一次表数据的数量:
-    #     1.中途有数据时,退出此次监控
-    #     2.连续2小时无数据时,触发机器人报警
-    #     """
-    #     # kanyikan_recommend_sheet = Feishu.get_values_batch(log_type, "kanyikan", "SdCHOM")
-    #     # kanyikan_moment_sheet = Feishu.get_values_batch(log_type, "kanyikan", "tGqZMX")
-    #     # xiaoniangao_hour_sheet = Feishu.get_values_batch(log_type, "xiaoniangao", "ba0da4")
-    #     # xiaoniangao_person_sheet = Feishu.get_values_batch(log_type, "xiaoniangao", "k6ldje")
-    #     # music_album_recommend_sheet = Feishu.get_values_batch(log_type, "music_album", "69UxPo")
-    #     # bszf_recommend_sheet = Feishu.get_values_batch(log_type, "bszf", "CcHgO7")
-    #     # kuaishou_recommend_sheet = Feishu.get_values_batch(log_type, "kuaishou", "JK6npf")
-    #     # kuaishou_follow_sheet = Feishu.get_values_batch(log_type, "kuaishou", "wW5cyb")
-    #     # gzh_recommend_sheet = Feishu.get_values_batch(log_type, "gzh", "zWKFGb")
-    #
-    #     for i in range(120):
-    #         if len(cls.get_feeds_sheet(log_type, crawler, sheet)) > 1:
-    #             break
-    #         else:
-    #             time.sleep(60)
-    #             if i == 119 and crawler == "kanyikan" and sheet == "recommend":
-    #                 Feishu.bot(log_type, "kanyikan", "看一看推荐榜表,已经 2 小时无数据了😤")
-    #             elif i == 119 and crawler == "kanyikan" and sheet == "moment":
-    #                 Feishu.bot(log_type, "kanyikan", "看一看朋友圈表,已经 2 小时无数据了😤")
-    #             elif i == 119 and crawler == "xiaoniangao" and sheet == "person":
-    #                 Feishu.bot(log_type, "xiaoniangao", "小年糕用户主页表,已经 2 小时无数据了😤")
-    #             elif i == 119 and crawler == "music_album" \
-    #                     and sheet == "recommend" and datetime.datetime.now().hour < 13:
-    #                 Feishu.bot(log_type, "music_album", "音乐相册推荐表,已经 2 小时无数据了😤")
-    #             elif i == 119 and crawler == "bszf" and sheet == "recommend" and datetime.datetime.now().hour < 13:
-    #                 Feishu.bot(log_type, "bszf", "本山祝福推荐表,已经 2 小时无数据了😤")
-    #             elif i == 119 and crawler == "kuaishou" and sheet == "recommend":
-    #                 Feishu.bot(log_type, "kuaishou", "快手推荐表,已经 2 小时无数据了😤")
-    #             elif i == 119 and crawler == "kuaishou" and sheet == "follow":
-    #                 Feishu.bot(log_type, "kuaishou", "快手关注表,已经 2 小时无数据了😤")
-    #             elif i == 119 and crawler == "gzh" and sheet == "recommend":
-    #                 Feishu.bot(log_type, "gzh", "公众号推荐表,已经 2 小时无数据了😤")

    # 获取各个爬虫表最新一条抓取时间
    @classmethod
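
The block removed above implemented the older per-sheet watchdog: poll a feeds sheet once a minute for up to two hours (120 checks) and send a Feishu alert only if every check still saw an empty sheet. A minimal sketch of that pattern, for reference only; fetch_rows and send_alert are hypothetical stand-ins for Feishu.get_values_batch and Feishu.bot, not functions from this repo:

import time

def watch_sheet(fetch_rows, send_alert, checks=120, interval=60):
    # Poll once per interval; the first row is assumed to be the header,
    # so more than one row means data arrived and the watch can stop early.
    for _ in range(checks):
        if len(fetch_rows()) > 1:
            return True
        time.sleep(interval)
    send_alert("no new rows for 2 hours")  # only reached if every check saw an empty sheet
    return False
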
@@ -138,6 +67,12 @@ class Bot:
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))

+            elif crawler == 'zhufumao':
+                sheet = Feishu.get_values_batch(log_type, "zhufumao", "e13bdf")
+                # 已下载表,最新一条视频抓取时间
+                first_download_time = sheet[1][5]
+                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
+
            elif crawler == 'ssnnyfq':
                sheet = Feishu.get_values_batch(log_type, "ssnnyfq", "290bae")
                # 已下载表,最新一条视频抓取时间
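
Each branch of get_first_time, including the new zhufumao one, converts the sheet's text timestamp into epoch seconds with time.strptime followed by time.mktime. A standalone illustration of that conversion; the cell value below is made up:

import time

cell = "2023/02/14 12:30:00"  # hypothetical value of sheet[1][5]
# strptime parses the text against the sheet's format; mktime turns the resulting
# struct_time into seconds since the epoch, interpreted in local time.
first_download_time = int(time.mktime(time.strptime(cell, "%Y/%m/%d %H:%M:%S")))
print(first_download_time)
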
@@ -205,7 +140,7 @@ class Bot:

            return first_download_time
        except Exception as e:
-            Common.logger(log_type).error("get_first_time异常:{}\n", e)
+            Common.logger(log_type).error(f"get_first_time异常:{e}\n")

    # 触发机器人报警:超过24小时没有新入库的视频
    @classmethod
@@ -265,6 +200,12 @@ class Bot:
                Feishu.bot(log_type, crawler, "众妙音信_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("众妙音信_已下载表,超过24小时没有新视频入库了😤\n")

+            # 祝福猫视频
+            elif crawler == "zhufumao" and (
+                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
+                Feishu.bot(log_type, crawler, "祝福猫_已下载表,超过24小时没有新视频入库了😤")
+                Common.logger(log_type).warning("祝福猫_已下载表,超过24小时没有新视频入库了😤\n")
+
            # 岁岁年年迎福气
            elif crawler == "ssnnyfq" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
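
Every alarm branch, including the new zhufumao one, applies the same staleness rule: if the current time minus the timestamp returned by get_first_time exceeds duration (24 hours when called from main), Feishu.bot posts a warning. A hedged restatement of that condition as a standalone helper; is_stale is a hypothetical name, not something defined in this module:

import time

def is_stale(first_download_time, duration=24 * 3600):
    # True when the newest downloaded video is older than duration seconds.
    return int(time.time()) - int(first_download_time) > int(duration)
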
@@ -283,10 +224,12 @@ class Bot:
                Common.logger(log_type).warning("本山祝福已下载表,超过24小时没有新视频入库了😤\n")

            # 快手爬虫报警
-            elif crawler == "kuaishou_recommend" and (int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
+            elif crawler == "kuaishou_recommend" and (
+                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "快手_推荐榜_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("快手_推荐榜_已下载表,超过24小时没有新视频入库了😤\n")
-            elif crawler == "kuaishou_follow" and (int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
+            elif crawler == "kuaishou_follow" and (
+                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "快手_用户主页_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("快手_用户主页_已下载表,超过24小时没有新视频入库了😤\n")

@@ -311,7 +254,7 @@ class Bot:
                Common.logger(log_type).warning("视频号已下载表,超过24小时没有新视频入库了😤\n")

        except Exception as e:
-            Common.logger(log_type).error("robot_alarm异常:{}", e)
+            Common.logger(log_type).error(f"robot_alarm异常:{e}\n")

    # 监控运行入口
    @classmethod
@@ -347,6 +290,9 @@ class Bot:
                Common.logger("bot").info("监控众妙音信已下载表")
                Bot.robot_download_sheet("bot", "zmyx", duration)

+                Common.logger("bot").info("监控祝福猫已下载表")
+                Bot.robot_download_sheet("bot", "zhufumao", duration)
+
                Common.logger("bot").info("监控岁岁年年迎福气已下载表")
                Bot.robot_download_sheet("bot", "ssnnyfq", duration)

@@ -373,15 +319,15 @@ class Bot:
                # Bot.robot_download_sheet("bot", "weiqun", duration)

                Common.del_logs("bot")
-                Common.logger("bot").info("休眠{}小时", 24-datetime.datetime.now().hour)
+                Common.logger("bot").info(f"休眠{24-datetime.datetime.now().hour}小时")
                time.sleep(3600 * (24-datetime.datetime.now().hour))
            else:
                pass


if __name__ == "__main__":
-    # Bot.robot_download_sheet("bot", "kuaishou_recommend", 1)
-    # Bot.robot_download_sheet("bot", "ssnnyfq", 1)
+
+    # Bot.robot_download_sheet("bot", "zhufumao", 1)
    Bot.main()

    pass
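
The final hunk also shows how main() pauses between daily passes: it sleeps 3600 * (24 - datetime.datetime.now().hour) seconds, i.e. the number of whole hours left in the day, with minutes and seconds ignored, so the wake-up lands only roughly at midnight. A standalone restatement of that arithmetic:

import datetime

now = datetime.datetime.now()
hours_left = 24 - now.hour              # whole hours remaining before midnight
seconds_to_sleep = 3600 * hours_left    # value passed to time.sleep() in main()
print(hours_left, seconds_to_sleep)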
|