run_bot.py

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2022/8/9
import datetime
import os
import sys
import time
sys.path.append(os.getcwd())
from main.common import Common
from main.feishu_lib import Feishu


class Bot:
    # Get the crawl time (Unix timestamp) of the latest record in each crawler's downloaded sheet
    @classmethod
    def get_first_time(cls, log_type, crawler):
        try:
            if crawler == "xiaoniangao_hour":
                sheet = Feishu.get_values_batch(log_type, "xiaoniangao", "yatRv2")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y-%m-%d %H:%M:%S")))
            elif crawler == "xiaoniangao_person":
                sheet = Feishu.get_values_batch(log_type, "xiaoniangao", "Wu0CeL")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y-%m-%d %H:%M:%S")))
            elif crawler == "xiaoniangao_play":
                sheet = Feishu.get_values_batch(log_type, "xiaoniangao", "c85k1C")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y-%m-%d %H:%M:%S")))
            elif crawler == "xigua_video":
                sheet = Feishu.get_values_batch(log_type, "xigua", "e075e9")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y-%m-%d %H:%M:%S")))
            elif crawler == "xigua_little_video":
                sheet = Feishu.get_values_batch(log_type, "xigua", "hDSDnv")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "zhihu_hot":
                sheet = Feishu.get_values_batch(log_type, "zhihu", "8871e3")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "zhihu_follow":
                sheet = Feishu.get_values_batch(log_type, "zhihu", "4MGuux")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "haokan_hot":
                sheet = Feishu.get_values_batch(log_type, "haokan", "5pWipX")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "haokan_channel":
                sheet = Feishu.get_values_batch(log_type, "haokan", "7f05d8")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "haokan_follow":
                sheet = Feishu.get_values_batch(log_type, "haokan", "kVaSjf")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "ssyy":
                sheet = Feishu.get_values_batch(log_type, "ssyy", "59f11d")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "ggdc":
                sheet = Feishu.get_values_batch(log_type, "ggdc", "070a67")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "jxxf":
                sheet = Feishu.get_values_batch(log_type, "jxxf", "d9e9b1")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "zmyx":
                sheet = Feishu.get_values_batch(log_type, "zmyx", "19c772")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "zhufumao":
                sheet = Feishu.get_values_batch(log_type, "zhufumao", "e13bdf")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "ssnnyfq":
                sheet = Feishu.get_values_batch(log_type, "ssnnyfq", "290bae")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "kdjsfq":
                sheet = Feishu.get_values_batch(log_type, "kdjsfq", "ad3b6d")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "gongzhonghao_xinxin":
                sheet = Feishu.get_values_batch(log_type, "gongzhonghao_xinxin", "47e39d")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "kanyikan":
                sheet = Feishu.get_values_batch(log_type, "kanyikan", "20ce0c")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "music_album":
                sheet = Feishu.get_values_batch(log_type, "music_album", "f5a76e")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "bszf":
                sheet = Feishu.get_values_batch(log_type, "bszf", "440018")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][4]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "kuaishou_recommend":
                sheet = Feishu.get_values_batch(log_type, "kuaishou", "3cd128")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "kuaishou_follow":
                sheet = Feishu.get_values_batch(log_type, "kuaishou", "fYdA8F")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "gzh":
                sheet = Feishu.get_values_batch(log_type, "gzh", "fCs3BT")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][3]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "weiqun":
                sheet = Feishu.get_values_batch(log_type, "weiqun", "3cd128")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "weishi":
                sheet = Feishu.get_values_batch(log_type, "weishi", "caa3fa")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "shipinhao_recommend":
                sheet = Feishu.get_values_batch(log_type, "shipinhao", "c77cf9")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "shipinhao_follow":
                sheet = Feishu.get_values_batch(log_type, "shipinhao", "KsVtLe")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "zongjiao":
                sheet = Feishu.get_values_batch(log_type, "zongjiao", "xf9wC2")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            elif crawler == "youtube":
                sheet = Feishu.get_values_batch(log_type, "youtube", "GVxlYk")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y-%m-%d %H:%M:%S")))
            else:
                sheet = Feishu.get_values_batch(log_type, "xiaoniangao", "yatRv2")
                # Downloaded sheet: crawl time of the latest video
                first_download_time = sheet[1][5]
                first_download_time = int(time.mktime(time.strptime(first_download_time, "%Y/%m/%d %H:%M:%S")))
            return first_download_time
        except Exception as e:
            Common.logger(log_type).error(f"get_first_time异常:{e}\n")
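
    # A minimal sketch (not called anywhere above): the same crawler -> sheet lookup that
    # get_first_time implements with an if/elif chain, expressed as data. The entries below
    # copy a few (wiki, sheet_id, column, time format) tuples from the branches above; the
    # remaining crawlers would follow the same pattern. CRAWLER_SHEET_SKETCH and
    # get_first_time_from_table are illustrative names, not part of the original module.
    CRAWLER_SHEET_SKETCH = {
        "xiaoniangao_hour": ("xiaoniangao", "yatRv2", 5, "%Y-%m-%d %H:%M:%S"),
        "xiaoniangao_person": ("xiaoniangao", "Wu0CeL", 5, "%Y-%m-%d %H:%M:%S"),
        "bszf": ("bszf", "440018", 4, "%Y/%m/%d %H:%M:%S"),
        "gzh": ("gzh", "fCs3BT", 3, "%Y/%m/%d %H:%M:%S"),
        "youtube": ("youtube", "GVxlYk", 5, "%Y-%m-%d %H:%M:%S"),
    }

    @classmethod
    def get_first_time_from_table(cls, log_type, crawler):
        # Unknown crawlers fall back to the xiaoniangao sheet, mirroring the else branch above.
        wiki, sheet_id, col, fmt = cls.CRAWLER_SHEET_SKETCH.get(
            crawler, ("xiaoniangao", "yatRv2", 5, "%Y/%m/%d %H:%M:%S"))
        sheet = Feishu.get_values_batch(log_type, wiki, sheet_id)
        # Downloaded sheet: crawl time of the latest video, converted to a Unix timestamp
        return int(time.mktime(time.strptime(sheet[1][col], fmt)))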

    # Trigger a bot alert: a downloaded sheet has had no new videos stored for more than 24 hours
    @classmethod
    def robot_download_sheet(cls, log_type, crawler, duration):
        """
        Downloaded-videos sheets: alert when no new video has been stored for more than 24 hours
        """
        try:
            # Kanyikan crawler alert
            if crawler == "kanyikan" and (int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "看一看已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("看一看已下载表,超过24小时没有新视频入库了😤\n")
            # Xiaoniangao crawler alert
            elif crawler == "xiaoniangao_hour" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "小年糕_小时级_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("小年糕_小时级_已下载表,超过24小时没有新视频入库了😤\n")
            elif crawler == "xiaoniangao_person" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "小年糕_用户主页_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("小年糕_用户主页_已下载表,超过24小时没有新视频入库了😤\n")
            elif crawler == "xiaoniangao_play" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "小年糕_播放量_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("小年糕_播放量_已下载表,超过24小时没有新视频入库了😤\n")
            # Xigua Video
            elif crawler == "xigua_video" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "西瓜视频_用户主页_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("西瓜视频_用户主页_已下载表,超过24小时没有新视频入库了😤\n")
            elif crawler == "xigua_little_video" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "西瓜视频_小视频_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("西瓜视频_小视频_已下载表,超过24小时没有新视频入库了😤\n")
            # Zhihu
            elif crawler == "zhihu_hot" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "知乎_热门_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("知乎_热门_已下载表,超过24小时没有新视频入库了😤\n")
            elif crawler == "zhihu_follow" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "知乎_定向_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("知乎_定向_已下载表,超过24小时没有新视频入库了😤\n")
            # Haokan Video
            elif crawler == "haokan_hot" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "好看_热榜_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("好看_热榜_已下载表,超过24小时没有新视频入库了😤\n")
            elif crawler == "haokan_channel" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "好看_频道_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("好看_频道_已下载表,超过24小时没有新视频入库了😤\n")
            elif crawler == "haokan_follow" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "好看_定向_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("好看_定向_已下载表,超过24小时没有新视频入库了😤\n")
            # Shengshengyingyin (ssyy)
            elif crawler == "ssyy" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "胜胜影音_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("胜胜影音_已下载表,超过24小时没有新视频入库了😤\n")
            # Ganggangdouchuan (ggdc)
            elif crawler == "ggdc" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "刚刚都传_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("刚刚都传_已下载表,超过24小时没有新视频入库了😤\n")
            # Jixiangxingfu (jxxf)
            elif crawler == "jxxf" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "吉祥幸福_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("吉祥幸福_已下载表,超过24小时没有新视频入库了😤\n")
            # Zhongmiaoyinxin (zmyx)
            elif crawler == "zmyx" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "众妙音信_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("众妙音信_已下载表,超过24小时没有新视频入库了😤\n")
            # Zhufumao video
            elif crawler == "zhufumao" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "祝福猫_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("祝福猫_已下载表,超过24小时没有新视频入库了😤\n")
            # Suisuiniannian-yingfuqi (ssnnyfq)
            elif crawler == "ssnnyfq" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "岁岁年年迎福气_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("岁岁年年迎福气_已下载表,超过24小时没有新视频入库了😤\n")
            # Kandaojiushifuqi (kdjsfq)
            elif crawler == "kdjsfq" and (
                    int(time.time()) - int(cls.get_first_time(log_type, crawler)) > int(duration)):
                Feishu.bot(log_type, crawler, "看到就是福气_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("看到就是福气_已下载表,超过24小时没有新视频入库了😤\n")
            # Official account (gongzhonghao) _xinxin crawler
            elif crawler == "gongzhonghao_xinxin" and (
                    int(time.time()) - int(cls.get_first_time(log_type, crawler)) > int(duration)):
                Feishu.bot(log_type, crawler, "公众号_信欣_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("公众号_信欣_已下载表,超过24小时没有新视频入库了😤\n")
            # Music album crawler alert
            elif crawler == "music_album" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "音乐相册已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("音乐相册已下载表,超过24小时没有新视频入库了😤\n")
            # Benshanzhufu (bszf) crawler alert
            elif crawler == "bszf" and (int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "本山祝福已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("本山祝福已下载表,超过24小时没有新视频入库了😤\n")
            # Kuaishou crawler alert
            elif crawler == "kuaishou_recommend" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "快手_推荐榜_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("快手_推荐榜_已下载表,超过24小时没有新视频入库了😤\n")
            elif crawler == "kuaishou_follow" and (
                    int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "快手_用户主页_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("快手_用户主页_已下载表,超过24小时没有新视频入库了😤\n")
            # Official account (gzh) crawler alert
            elif crawler == "gzh" and (int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "公众号已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("公众号已下载表,超过24小时没有新视频入库了😤\n")
            # Weiqun video crawler alert
            elif crawler == "weiqun" and (int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "微群视频已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("微群视频已下载表,超过24小时没有新视频入库了😤\n")
            # Weishi crawler alert
            elif crawler == "weishi" and (int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "微视已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("微视已下载表,超过24小时没有新视频入库了😤\n")
            # Shipinhao (WeChat Channels) crawler alert
            elif crawler == "shipinhao_recommend" and (int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "视频号_推荐_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("视频号_推荐_已下载表,超过24小时没有新视频入库了😤\n")
            elif crawler == "shipinhao_follow" and (int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "视频号_定向_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("视频号_定向_已下载表,超过24小时没有新视频入库了😤\n")
            # Religious official accounts (zongjiao) crawler alert
            elif crawler == "zongjiao" and (int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "宗教公众号已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("宗教公众号已下载表,超过24小时没有新视频入库了😤\n")
            # youtube
            elif crawler == "youtube" and (int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration)):
                Feishu.bot(log_type, crawler, "youtube_定向_已下载表,超过24小时没有新视频入库了😤")
                Common.logger(log_type).warning("youtube_定向_已下载表,超过24小时没有新视频入库了😤\n")
        except Exception as e:
            Common.logger(log_type).error(f"robot_download_sheet异常:{e}\n")
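
    # A minimal sketch (not called anywhere above): the alert branches in robot_download_sheet
    # all share one shape, so the alert text could be looked up from a crawler -> display-name
    # mapping. ALERT_NAME_SKETCH and robot_download_sheet_from_table are illustrative names;
    # the entries below copy a few display names from the branches above, and the remaining
    # crawlers would follow the same pattern.
    ALERT_NAME_SKETCH = {
        "kanyikan": "看一看",
        "xiaoniangao_hour": "小年糕_小时级_",
        "kuaishou_follow": "快手_用户主页_",
        "youtube": "youtube_定向_",
    }

    @classmethod
    def robot_download_sheet_from_table(cls, log_type, crawler, duration):
        # Unknown crawlers fall back to using the crawler key itself as the display name.
        name = cls.ALERT_NAME_SKETCH.get(crawler, crawler)
        if int(time.time()) - cls.get_first_time(log_type, crawler) > int(duration):
            msg = f"{name}已下载表,超过24小时没有新视频入库了😤"
            Feishu.bot(log_type, crawler, msg)
            Common.logger(log_type).warning(msg + "\n")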

    # Monitoring entry point
    @classmethod
    def main(cls):
        """
        Run the check once a day, when the local hour is 10:
        if the crawl time of the latest record in a downloaded sheet is more than 24 hours
        before the current time, trigger the bot alert and send a Feishu alarm message.
        """
        # Alert when a downloaded sheet has had no new videos stored for more than 24 hours
        duration = 3600 * 24
        while True:
            if datetime.datetime.now().hour == 10:
                Common.logger("bot").info("监控看一看已下载表")
                Bot.robot_download_sheet("bot", "kanyikan", duration)
                Common.logger("bot").info("监控小年糕已下载表")
                Bot.robot_download_sheet("bot", "xiaoniangao_hour", duration)
                Bot.robot_download_sheet("bot", "xiaoniangao_person", duration)
                Bot.robot_download_sheet("bot", "xiaoniangao_play", duration)
                # Common.logger('bot').info('监控知乎已下载表')
                # Bot.robot_download_sheet("bot", "zhihu_hot", duration)
                # Bot.robot_download_sheet("bot", "zhihu_follow", duration)
                # Common.logger('bot').info('监控好看已下载表')
                # Bot.robot_download_sheet("bot", "haokan_hot", duration)
                # Bot.robot_download_sheet("bot", "haokan_channel", duration)
                # Bot.robot_download_sheet("bot", "haokan_follow", duration)
                # Common.logger("bot").info("监控吉祥幸福已下载表")
                # Bot.robot_download_sheet("bot", "jxxf", duration)
                # Common.logger("bot").info("监控岁岁年年迎福气已下载表")
                # Bot.robot_download_sheet("bot", "ssnnyfq", duration)
                # Common.logger("bot").info("监控看到就是福气已下载表")
                # Bot.robot_download_sheet("bot", "kdjsfq", duration)
                Common.logger("bot").info("监控本山祝福已下载表")
                Bot.robot_download_sheet("bot", "bszf", duration)
                Common.logger("bot").info("监控快手已下载表")
                # Bot.robot_download_sheet("bot", "kuaishou_recommend", duration)
                Bot.robot_download_sheet("bot", "kuaishou_follow", duration)
                # Common.logger("bot").info("监控微视已下载表")
                # Bot.robot_download_sheet("bot", "weishi", duration)
                Common.logger("bot").info("监控公众号_信欣_已下载表")
                Bot.robot_download_sheet("bot", "gongzhonghao_xinxin", duration)
                # Common.logger("bot").info("监控视频号已下载表")
                # Bot.robot_download_sheet("bot", "shipinhao_follow", duration)
                # Common.logger("bot").info("监控宗教公众号已下载表")
                # Bot.robot_download_sheet("bot", "zongjiao", duration)
                # Common.logger("bot").info("刚刚都传已下载表")
                # Bot.robot_download_sheet("bot", "ggdc", duration)
                Common.logger("bot").info("youtube_定向_已下载表")
                Bot.robot_download_sheet("bot", "youtube", duration)
                # Common.logger("bot").info("监控众妙音信已下载表")
                # Bot.robot_download_sheet("bot", "zmyx", duration)
                # Common.logger("bot").info("监控祝福猫已下载表")
                # Bot.robot_download_sheet("bot", "zhufumao", duration)
                # Common.logger("bot").info("监控西瓜视频已下载表")
                # Bot.robot_download_sheet("bot", "xigua_video", duration)
                # Bot.robot_download_sheet("bot", "xigua_little_video", duration)
                # Common.logger("bot").info("监控胜胜影音已下载表")
                # Bot.robot_download_sheet("bot", "ssyy", duration)
                # Common.logger("bot").info("监控公众号已下载表")
                # Bot.robot_download_sheet("bot", "gzh", duration)
                # Common.logger("bot").info("监控音乐相册已下载表")
                # Bot.robot_download_sheet("bot", "music_album", duration)
                # Common.logger("bot").info("监控微群视频已下载表")
                # Bot.robot_download_sheet("bot", "weiqun", duration)
                Common.del_logs("bot")
                Common.logger("bot").info(f"休眠{24 - datetime.datetime.now().hour}小时")
                time.sleep(3600 * (24 - datetime.datetime.now().hour))
            else:
                # Sleep briefly instead of busy-looping while waiting for the 10:00 check window
                time.sleep(60)
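

# A minimal sketch (not called by Bot.main above): compute how long to sleep until the next
# 10:00 local time, as an alternative to polling datetime.datetime.now().hour in a loop.
# The target hour mirrors the check in Bot.main; the function name is illustrative.
def seconds_until_hour(target_hour=10):
    now = datetime.datetime.now()
    run_at = now.replace(hour=target_hour, minute=0, second=0, microsecond=0)
    if run_at <= now:
        # The target hour has already passed today; schedule for tomorrow.
        run_at += datetime.timedelta(days=1)
    return int((run_at - now).total_seconds())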


if __name__ == "__main__":
    # Bot.robot_download_sheet("bot", "youtube", 1)
    Bot.main()