소스 검색

update yesterday values

wangkun 2 년 전
부모
커밋
f3abbefa31
9개의 변경된 파일, 480개의 추가 작업 그리고 189개의 삭제 작업
  1. 37 4
      main/common.py
  2. 23 8
      main/demo.py
  3. 81 0
      main/download_person.py
  4. 81 96
      main/feishu_lib.py
  5. 197 77
      main/hour_list.py
  6. 4 4
      main/run.py
  7. 51 0
      main/run_hour_list.py
  8. 3 0
      main/run_person.py
  9. 3 0
      person-logs/__init__.py

+ 37 - 4
main/common.py

@@ -52,15 +52,48 @@ class Common:
 
         return logger
 
+    # 使用 logger 模块生成日志
+    @staticmethod
+    def person_logger():
+        """
+        使用 logger 模块生成日志
+        """
+        # 日志路径
+        log_dir = "./person-logs/"
+        log_path = os.getcwd() + os.sep + log_dir
+        if not os.path.isdir(log_path):
+            os.makedirs(log_path)
+
+        # 日志文件名
+        log_name = time.strftime("%Y-%m-%d", time.localtime(time.time())) + '.log'
+
+        # 日志不打印到控制台
+        logger.remove(handler_id=None)
+
+        # rotation="500 MB",实现每 500MB 存储一个文件
+        # rotation="12:00",实现每天 12:00 创建一个文件
+        # rotation="1 week",每周创建一个文件
+        # retention="10 days",每隔10天之后就会清理旧的日志
+        # 初始化日志
+        logger.add(log_dir + log_name, level="INFO", rotation='00:00')
+
+        return logger
+
     # 清除日志,保留最近 7 个文件
     @classmethod
-    def del_logs(cls):
+    def del_logs(cls, d_dir):
         """
         清除冗余日志文件
+        :d_dir: 需要删除的 log 地址
         :return: 保留最近 7 个日志
         """
-        log_dir = "./logs/"
-        all_files = sorted(os.listdir(log_dir))
+        global logs_dir
+        if d_dir == "logs":
+            logs_dir = "./logs/"
+        elif d_dir == "person-logs":
+            logs_dir = "./person-logs/"
+
+        all_files = sorted(os.listdir(logs_dir))
         all_logs = []
         for log in all_files:
             name = os.path.splitext(log)[-1]
@@ -71,7 +104,7 @@ class Common:
             pass
         else:
             for file in all_logs[:len(all_logs) - 7]:
-                os.remove(log_dir + file)
+                os.remove(logs_dir + file)
         cls.logger().info("清除冗余日志成功")
 
     # 封装下载视频或封面的方法

+ 23 - 8
main/demo.py

@@ -4,11 +4,26 @@
 import datetime
 import time
 
-id1 = "4536997774"
-id2 = "627e1e350000010768f89232"
-time1 = 44697
-time2 = time.strftime("%Y-%m-%d", time.localtime(time1))
-time3 = datetime.datetime.now().strftime("%Y-%m-%d")
-print(time3)
-
-print(int("09"))
+# spreadsheetToken = ["shtcngRPoDYAi24x52j2nDuHMih",
+#                     "shtcnp4SaJt37q6OOOrYzPMjQkg",
+#                     "shtcn5YSWg91JfVGzj0SFZIRRPh",
+#                     "shtcnYxiyQ1wLklo1W5Kdqc9cGh"]
+# print(spreadsheetToken[0])
+
+# time1 = datetime.datetime.now().strftime("%Y-%m-%d")
+# time2 = datetime.datetime.now().strftime("%Y")
+# time3 = datetime.datetime.now().strftime("%m")
+# time4 = datetime.datetime.now().strftime("%d")
+# print(type(time1))
+# print(type(time2))
+# print(time3)
+# print(time4)
+yesterday = (datetime.date.today() + datetime.timedelta(days=-2)).strftime("%Y-%m-%d")
+update_hour = datetime.datetime.now().strftime("%Y-%m-%d")
+print(type(yesterday))
+print(yesterday)
+print(type(update_hour))
+print(update_hour)
+
+
+

+ 81 - 0
main/download_person.py

@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+# @Author: wangkun
+# @Time: 2022/5/18
+import time
+
+import requests
+
+from main.common import Common
+from main.feishu_lib import Feishu
+
+
+class Person:
+    # 个人作品列表页
+    @classmethod
+    def get_person_list(cls):
+        try:
+            if len(Feishu.get_values_batch("oNpThi")) == 1:
+                print(len(Feishu.get_values_batch("oNpThi")))
+                print(Feishu.get_values_batch("oNpThi"))
+                Common.person_logger().info("暂无定向爬取账号")
+            else:
+                for i in range(1, len(Feishu.get_values_batch("oNpThi"))+1):
+                    time.sleep(1)
+                    Common.person_logger().info("")
+
+                    url = "https://api.xiaoniangao.cn/profile/list_album"
+                    headers = {
+                        "X-Mid": "1164637358",
+                        "X-Token-Id": "af9c47bb6c942236ff35ee10d355f3b0-1164637358",
+                        "content-type": "application/json",
+                        "uuid": "3d460a1b-ab85-426b-bd80-62029acaa2c0",
+                        "Accept-Encoding": "gzip,compress,br,deflate",
+                        "User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 14_7_1 like Mac OS X)"
+                                      " AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148 "
+                                      "MicroMessenger/8.0.20(0x18001435) NetType/WIFI Language/zh_CN",
+                        "Referer": "https://servicewechat.com/wxd7911e4c177690e4/617/page-frame.html"
+                    }
+                    data = {
+                        "visited_mid": "260159327",
+                        "qs": "imageMogr2/gravity/center/rotate/$/thumbnail/!690x385r/crop/690x385/interlace/1/format/jpg",
+                        "h_qs": "imageMogr2/gravity/center/rotate/$/thumbnail/!120x120r/crop/120x120/interlace/1/format/jpg",
+                        "limit": 20,
+                        "token": "451273638af2c8bb90266bcfaf601a68",
+                        "uid": "3d460a1b-ab85-426b-bd80-62029acaa2c0",
+                        "proj": "ma",
+                        "wx_ver": "8.0.20",
+                        "code_ver": "3.62.0",
+                        "log_common_params": {
+                            "e": [{
+                                "data": {
+                                    "page": "profilePage",
+                                    "topic": "public"
+                                }
+                            }],
+                            "ext": {
+                                "brand": "iPhone",
+                                "device": "iPhone 11",
+                                "os": "iOS 14.7.1",
+                                "weixinver": "8.0.20",
+                                "srcver": "2.24.2",
+                                "net": "wifi",
+                                "scene": "1089"
+                            },
+                            "pj": "1",
+                            "pf": "2",
+                            "session_id": "ba9b042f-5150-4c3e-a5da-b2fc4181b954"
+                        }
+                    }
+                    r = requests.post(url=url, headers=headers, json=data)
+                    feeds = r.json()["data"]["list"]
+                    # for i in feeds:
+                    #     print(i)
+                    values = [["111", "222", "333", "444"]]
+                    Feishu.update_hour_list_values("yatRv2", "A4:D4", values)
+        except Exception as e:
+            Common.person_logger().info("个人作品列表页异常:{}", e)
+
+
+if __name__ == "__main__":
+    person = Person()
+    person.get_person_list()

+ 81 - 96
main/feishu_lib.py

@@ -17,10 +17,31 @@ class Feishu:
     """
     编辑飞书云文档
     """
-    feishu_url = "https://w42nne6hzg.feishu.cn/sheets/shtcngRPoDYAi24x52j2nDuHMih?"
-    spreadsheetToken = "shtcngRPoDYAi24x52j2nDuHMih"
-
-    # 获取飞书api token
+    # 看一看爬虫数据表
+    kanyikan_url = "https://w42nne6hzg.feishu.cn/sheets/shtcngRPoDYAi24x52j2nDuHMih?"
+    # 快手爬虫数据表
+    kuaishou_url = "https://w42nne6hzg.feishu.cn/sheets/shtcnp4SaJt37q6OOOrYzPMjQkg?"
+    # 微视爬虫数据表
+    weishi_url = "https://w42nne6hzg.feishu.cn/sheets/shtcn5YSWg91JfVGzj0SFZIRRPh?"
+    # 小年糕爬虫数据表
+    xiaoniangao_url = "https://w42nne6hzg.feishu.cn/sheets/shtcnYxiyQ1wLklo1W5Kdqc9cGh?"
+
+    # 飞书路径token
+    @classmethod
+    def spreadsheetToken(cls, crawler):
+        """
+        :param crawler: 哪个爬虫
+        """
+        if crawler == "kanyikan":
+            return "shtcngRPoDYAi24x52j2nDuHMih"
+        elif crawler == "kuaishou":
+            return "shtcnp4SaJt37q6OOOrYzPMjQkg"
+        elif crawler == "weishi":
+            return "shtcn5YSWg91JfVGzj0SFZIRRPh"
+        elif crawler == "xiaoniangao":
+            return "shtcnYxiyQ1wLklo1W5Kdqc9cGh"
+
+            # 获取飞书api token
     @classmethod
     def get_token(cls):
         """
@@ -41,12 +62,14 @@ class Feishu:
 
     # 获取表格元数据
     @classmethod
-    def get_metainfo(cls):
+    def get_metainfo(cls, crawler):
         """
         获取表格元数据
         :return:
         """
-        url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" + cls.spreadsheetToken + "/metainfo"
+        get_metainfo_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/"\
+                           + cls.spreadsheetToken(crawler) + "/metainfo"
+
         headers = {
             "Authorization": "Bearer " + cls.get_token(),
             "Content-Type": "application/json; charset=utf-8"
@@ -57,7 +80,7 @@ class Feishu:
         }
         try:
             urllib3.disable_warnings()
-            r = requests.get(url=url, headers=headers, params=params, proxies=proxies, verify=False)
+            r = requests.get(url=get_metainfo_url, headers=headers, params=params, proxies=proxies, verify=False)
             response = json.loads(r.content.decode("utf8"))
             return response
         except Exception as e:
@@ -65,14 +88,15 @@ class Feishu:
 
     # 读取工作表中所有数据
     @classmethod
-    def get_values_batch(cls, sheetid):
+    def get_values_batch(cls, crawler, sheetid):
         """
         读取工作表中所有数据
+        :param crawler: 哪个爬虫
         :param sheetid: 哪张表
         :return: 所有数据
         """
-
-        url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" + cls.spreadsheetToken + "/values_batch_get"
+        get_values_batch_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/"\
+                               + cls.spreadsheetToken(crawler) + "/values_batch_get"
         headers = {
             "Authorization": "Bearer " + cls.get_token(),
             "Content-Type": "application/json; charset=utf-8"
@@ -95,25 +119,26 @@ class Feishu:
         }
         try:
             urllib3.disable_warnings()
-            r = requests.get(url=url, headers=headers, params=params, proxies=proxies, verify=False)
+            r = requests.get(url=get_values_batch_url, headers=headers, params=params, proxies=proxies, verify=False)
             response = json.loads(r.content.decode("utf8"))
             values = response["data"]["valueRanges"][0]["values"]
             return values
         except Exception as e:
             Common.logger().error("读取工作表所有数据异常:{}", e)
 
-    # 工作表,插入行
+    # 工作表,插入行或列
     @classmethod
-    def insert_columns(cls, sheetid, majordimension, startindex, endindex):
+    def insert_columns(cls, crawler, sheetid, majordimension, startindex, endindex):
         """
-        工作表插入数据
-        sheetid:哪张工作表
-        majordimension:行或者列
-        startindex:开始位置
-        endindex:结束位置
+        工作表插入行或列
+        :param crawler: 哪个爬虫
+        :param sheetid:哪张工作表
+        :param majordimension:行或者列
+        :param startindex:开始位置
+        :param endindex:结束位置
         """
-        url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/"\
-              + cls.spreadsheetToken + "/insert_dimension_range"
+        insert_columns_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/"\
+                             + cls.spreadsheetToken(crawler) + "/insert_dimension_range"
         headers = {
             "Authorization": "Bearer " + cls.get_token(),
             "Content-Type": "application/json; charset=utf-8"
@@ -129,68 +154,23 @@ class Feishu:
         }
         try:
             urllib3.disable_warnings()
-            r = requests.post(url=url, headers=headers, json=body, proxies=proxies, verify=False)
+            r = requests.post(url=insert_columns_url, headers=headers, json=body, proxies=proxies, verify=False)
             Common.logger().info("插入行或列:{}", r.json()["msg"])
         except Exception as e:
             Common.logger().error("插入行或列异常:{}", e)
 
-    # 工作表,首行写入数据
-    @classmethod
-    def update_values(cls, sheetid, a1, b1, c1, d1, e1, f1, g1, h1, i1, j1, k1, l1, m1, n1, o1):
-        """
-        写入数据
-        :param sheetid: 哪张工作表
-        :param a1: 单元格
-        :param b1: 单元格
-        :param c1: 单元格
-        :param d1: 单元格
-        :param e1: 单元格
-        :param f1: 单元格
-        :param g1: 单元格
-        :param h1: 单元格
-        :param i1: 单元格
-        :param j1: 单元格
-        :param k1: 单元格
-        :param l1: 单元格
-        :param m1: 单元格
-        :param n1: 单元格
-        :param o1: 单元格
-        :return:
-        """
-
-        url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" + cls.spreadsheetToken + "/values_batch_update"
-        headers = {
-            "Authorization": "Bearer " + cls.get_token(),
-            "Content-Type": "application/json; charset=utf-8"
-        }
-        body = {
-            "valueRanges": [
-                {
-                    "range": sheetid + "!A2:O2",
-                    "values": [
-                        [a1, b1, c1, d1, e1, f1, g1, h1, i1, j1, k1, l1, m1, n1, o1]
-                    ]
-                },
-            ],
-        }
-        try:
-            urllib3.disable_warnings()
-            r = requests.post(url=url, headers=headers, json=body, proxies=proxies, verify=False)
-            Common.logger().info("空行写入视频数据:{}", r.json()["msg"])
-        except Exception as e:
-            Common.logger().error("空行写入视频数据异常:{}", e)
-
     # 写入数据
     @classmethod
-    def update_hour_list_values(cls, sheetid, ranges, values):
+    def update_values(cls, crawler, sheetid, ranges, values):
         """
-        小时榜写入数据
-        sheetid:工作表 ID
-        ranges:单元格范围
-        values:写入的具体数据,list
+        写入数据
+        :param crawler: 哪个爬虫
+        :param sheetid:哪张工作表
+        :param ranges:单元格范围
+        :param values:写入的具体数据,list
         """
-        # 表格 ID
-        url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" + cls.spreadsheetToken + "/values_batch_update"
+        update_values_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/"\
+                            + cls.spreadsheetToken(crawler) + "/values_batch_update"
         headers = {
             "Authorization": "Bearer " + cls.get_token(),
             "Content-Type": "application/json; charset=utf-8"
@@ -206,20 +186,22 @@ class Feishu:
 
         try:
             urllib3.disable_warnings()
-            r = requests.post(url=url, headers=headers, json=body, proxies=proxies, verify=False)
-            Common.logger().info("小时榜写入数据:{}", r.json()["msg"])
+            r = requests.post(url=update_values_url, headers=headers, json=body, proxies=proxies, verify=False)
+            Common.logger().info("写入数据:{}", r.json()["msg"])
         except Exception as e:
-            Common.logger().error("小时榜写入数据异常:{}", e)
+            Common.logger().error("写入数据异常:{}", e)
 
     # 合并单元格
     @classmethod
-    def merge_cells(cls, sheetid, ranges):
+    def merge_cells(cls, crawler, sheetid, ranges):
         """
-        合并刚插入的单元格 G1:I1
-        sheetid:哪张工作表
-        ranges:需要合并的单元格范围
+        合并单元格
+        :param crawler: 哪个爬虫
+        :param sheetid:哪张工作表
+        :param ranges:需要合并的单元格范围
         """
-        url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" + cls.spreadsheetToken + "/merge_cells"
+        merge_cells_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/"\
+                          + cls.spreadsheetToken(crawler) + "/merge_cells"
         headers = {
             "Authorization": "Bearer " + cls.get_token(),
             "Content-Type": "application/json; charset=utf-8"
@@ -232,22 +214,23 @@ class Feishu:
 
         try:
             urllib3.disable_warnings()
-            r = requests.post(url=url, headers=headers, json=body, proxies=proxies, verify=False)
+            r = requests.post(url=merge_cells_url, headers=headers, json=body, proxies=proxies, verify=False)
             Common.logger().info("合并单元格:{}", r.json()["msg"])
         except Exception as e:
             Common.logger().error("合并单元格异常:{}", e)
 
     # 读取单元格数据
     @classmethod
-    def get_range_value(cls, sheetid, cell):
+    def get_range_value(cls, crawler, sheetid, cell):
         """
         读取单元格内容
+        :param crawler: 哪个爬虫
         :param sheetid: 哪张工作表
         :param cell: 哪个单元格
         :return: 单元格内容
         """
-        url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" \
-              + cls.spreadsheetToken + "/values/" + sheetid + "!" + cell
+        get_range_value_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/"\
+                              + cls.spreadsheetToken(crawler) + "/values/" + sheetid + "!" + cell
         headers = {
             "Authorization": "Bearer " + cls.get_token(),
             "Content-Type": "application/json; charset=utf-8"
@@ -267,24 +250,25 @@ class Feishu:
         }
         try:
             urllib3.disable_warnings()
-            r = requests.get(url=url, headers=headers, params=params, proxies=proxies, verify=False)
-            # print(url)
+            r = requests.get(url=get_range_value_url, headers=headers, params=params, proxies=proxies, verify=False)
             return r.json()["data"]["valueRange"]["values"][0]
         except Exception as e:
             Common.logger().error("读取单元格数据异常:{}", e)
 
     # 删除行或列,可选 ROWS、COLUMNS
     @classmethod
-    def dimension_range(cls, sheetid, major_dimension, startindex, endindex):
+    def dimension_range(cls, crawler, sheetid, major_dimension, startindex, endindex):
         """
         删除行或列
+        :param crawler: 哪个爬虫
         :param sheetid:工作表
         :param major_dimension:默认 ROWS ,可选 ROWS、COLUMNS
         :param startindex:开始的位置
         :param endindex:结束的位置
         :return:
         """
-        url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" + cls.spreadsheetToken + "/dimension_range"
+        dimension_range_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/"\
+                              + cls.spreadsheetToken(crawler) + "/dimension_range"
         headers = {
             "Authorization": "Bearer " + cls.get_token(),
             "Content-Type": "application/json; charset=utf-8"
@@ -299,7 +283,7 @@ class Feishu:
             }
         try:
             urllib3.disable_warnings()
-            r = requests.delete(url=url, headers=headers, json=body, proxies=proxies, verify=False)
+            r = requests.delete(url=dimension_range_url, headers=headers, json=body, proxies=proxies, verify=False)
             Common.logger().info("删除视频数据:{}", r.json()["msg"])
         except Exception as e:
             Common.logger().error("删除视频数据异常:{}", e)
@@ -314,18 +298,19 @@ if __name__ == "__main__":
     # feishu.get_metainfo()
 
     # 读取工作表中所有数据
-    # print(feishu.get_values_batch("k2rKkv"))
+    # print(feishu.get_values_batch("xiaoniangao", "ba0da4"))
     # print(len(feishu.get_values_batch("k2rKkv")))
     # for i in range(3, len(feishu.get_values_batch("k2rKkv"))+1):
     #     print(feishu.get_range_value("k2rKkv", "A" + str(i) + ":" + "A" + str(i))[0])
     #     print(feishu.update_hour_list_values("k2rKkv", "G" + str(i) + ":" + "H" + str(i), [["333"]]))
     #     time.sleep(0.5)
     # feishu.get_range_value("k2rKkv", "F3:F3")[0]
-    print(type(feishu.get_range_value("k2rKkv", "H19:H19")[0].split(" ")[-1].split(":")[0]))
-    print(feishu.get_range_value("k2rKkv", "H19:H19")[0])
-    print(int(feishu.get_range_value("k2rKkv", "H19:H19")[0].split(" ")[-1].split(":")[0]))
+    # print(type(feishu.get_range_value("k2rKkv", "H19:H19")[0].split(" ")[-1].split(":")[0]))
+    # print(feishu.get_range_value("k2rKkv", "H19:H19")[0])
+    # print(int(feishu.get_range_value("xiaoniangao", "ba0da4", "G6:G6")[0].split(" ")[-1].split(":")[0]))
+    # print(feishu.get_range_value("xiaoniangao", "ba0da4", "G6:G6")[0].split(" ")[0])
 
-    feishu.update_hour_list_values("k2rKkv", "H12:H12", [["2022-05-17 15:14:27"]])
+    feishu.update_values("xiaoniangao", "ba0da4", "H4:H4", [["2022-05-18 21:14:27"]])
 
     # 看一看+工作表,插入首行
     # print(feishu.insert_columns("k2rKkv", "COLUMNS", 6, 9))

+ 197 - 77
main/hour_list.py

@@ -3,7 +3,6 @@
 # @Time: 2022/5/16
 import datetime
 import time
-
 import requests
 import urllib3
 
@@ -16,9 +15,10 @@ proxies = {"http": None, "https": None}
 class HourList:
     # 今天的日期:年-月-日
     today = datetime.datetime.now().strftime("%Y-%m-%d")
-
-    # 已下载小时榜视频列表
-    download_hour_video_list = []
+    # 昨天
+    yesterday = (datetime.date.today() + datetime.timedelta(days=-1)).strftime("%Y-%m-%d")
+    # 前天
+    before_yesterday = (datetime.date.today() + datetime.timedelta(days=-2)).strftime("%Y-%m-%d")
 
     # 下载规则
     @staticmethod
@@ -55,13 +55,13 @@ class HourList:
         1.从列表获取视频,7 天内,播放量>=5000
         2.时长 1-10min
         3.每天10:00、15:00、20:00 把符合规则的视频,写入云文档
-        https://w42nne6hzg.feishu.cn/sheets/shtcngRPoDYAi24x52j2nDuHMih?sheet=k2rKkv
+        https://w42nne6hzg.feishu.cn/sheets/shtcnYxiyQ1wLklo1W5Kdqc9cGh?sheet=ba0da4
         """
         url = "https://kapi.xiaoniangao.cn/trends/get_recommend_trends"
         headers = {
-            "x-b3-traceid": "695dcad76ccca",
-            "X-Token-Id": "af9c47bb6c942236ff35ee10d355f3b0-1164637358",
-            "uid": "3d460a1b-ab85-426b-bd80-62029acaa2c0",
+            "x-b3-traceid": "bd267349bf41b",
+            "X-Token-Id": "86f6d7cc2b2b6870004df5d16c82aaf3-1185665701",
+            "uid": "8fde3c6c-c070-4379-bfc4-15c7e85139c9",
             "content-type": "application/json",
             "Accept-Encoding": "gzip,compress,br,deflate",
             "User-Agent": 'Mozilla/5.0 (iPhone; CPU iPhone OS 14_7_1 like Mac OS X)'
@@ -111,11 +111,11 @@ class HourList:
                 },
                 "pj": "1",
                 "pf": "2",
-                "session_id": "1a20f033-3511-4e7b-9b2a-95e5c542461f"
+                "session_id": "7bcce313-b57d-4305-8d14-6ebd9a1bad29"
             },
             "refresh": False,
-            "token": "451273638af2c8bb90266bcfaf601a68",
-            "uid": "3d460a1b-ab85-426b-bd80-62029acaa2c0",
+            "token": "90747742180aeb22c0fe3a3c6a38f3d9",
+            "uid": "8fde3c6c-c070-4379-bfc4-15c7e85139c9",
             "proj": "ma",
             "wx_ver": "8.0.20",
             "code_ver": "3.62.0"
@@ -253,24 +253,23 @@ class HourList:
                             or video_send_time == "" or user_name == "" or head_url == "" \
                             or cover_url == "" or video_url == "":
                         Common.logger().warning("无效视频")
+
                     # 判断发布时间是否 > 7天
-                    # elif int(time.time()) - int(video_send_time)/1000 > 2592000:
-                    #     Common.logger().info("发布时间大于30天", video_title)
                     elif int(time.time()) - int(video_send_time)/1000 > 604800:
                         Common.logger().info("发布时间大于7天", video_title)
+
                     # 判断播放量是否 > 5000
-                    # elif int(video_play_cnt) < 500:
-                    #     Common.logger().info("该视频30天内播放量<500:{}", video_title)
                     elif int(video_play_cnt) < 5000:
                         Common.logger().info("该视频7天内播放量<5000:{}", video_title)
+
                     # 从云文档去重:https://w42nne6hzg.feishu.cn/sheets/shtcngRPoDYAi24x52j2nDuHMih?sheet=onyBDH
-                    elif video_id in [j for i in Feishu.get_values_batch("k2rKkv") for j in i]:
+                    elif video_id in [j for i in Feishu.get_values_batch("xiaoniangao", "ba0da4") for j in i]:
                         Common.logger().info("该视频已保存过:{}", video_title)
                     else:
                         Common.logger().info("该视频未下载,添加至feeds中:{}".format(video_title))
                         # feeds工作表,插入空行
                         time.sleep(1)
-                        Feishu.insert_columns("k2rKkv", "ROWS", 2, 3)
+                        Feishu.insert_columns("xiaoniangao", "ba0da4", "ROWS", 2, 3)
 
                         # 获取当前时间
                         get_feeds_time = int(time.time())
@@ -281,7 +280,7 @@ class HourList:
                                    video_play_cnt]]
                         # 等待 1s,防止操作云文档太频繁,导致报错
                         time.sleep(1)
-                        Feishu.update_hour_list_values("k2rKkv", "A3:I3", values)
+                        Feishu.update_values("xiaoniangao", "ba0da4", "A3:I3", values)
 
         except Exception as e:
             Common.logger().error("获取小时榜视频列表异常:{}", e)
@@ -290,26 +289,33 @@ class HourList:
     @classmethod
     def check_hour_list_data(cls):
         # 判断J1单元格的日期是否为今天
-        if Feishu.get_range_value("k2rKkv", "J1:J1")[0] != cls.today:
+        if Feishu.get_range_value("xiaoniangao", "ba0da4", "J1:J1")[0] != cls.today:
             # 插入3列 J1:L1,并写入日期和时间数据
-            values = [[datetime.datetime.now().strftime("%Y-%m-%d")], ["10:00", "15:00", "20:00"]]
-            Feishu.insert_columns("k2rKkv", "COLUMNS", 9, 12)
-            Feishu.update_hour_list_values("k2rKkv", "J1:L2", values)
-            Feishu.merge_cells("k2rKkv", "J1:L1")
+            values = [[cls.today], ["10:00", "15:00", "20:00"]]
+            Feishu.insert_columns("xiaoniangao", "ba0da4", "COLUMNS", 9, 12)
+            Feishu.update_values("xiaoniangao", "ba0da4",  "J1:L2", values)
+            Feishu.merge_cells("xiaoniangao", "ba0da4", "J1:L1")
             Common.logger().info("插入今天日期成功")
         else:
             Common.logger().info("今日上升榜日期已存在")
 
     # 清除空行
     @classmethod
-    def del_null_rows(cls):
-        for i in range(3, len(Feishu.get_values_batch("k2rKkv")) + 1):
-            time.sleep(0.5)
+    def del_null_rows(cls, crawler, sheetid, startindex):
+        """
+        :params sheetid:工作表 ID
+        :params startindex:从第几行开始清除
+        """
+        for i in range(int(startindex), len(Feishu.get_values_batch(crawler, sheetid)) + 1):
+            time.sleep(1)
             Common.logger().info("正在检查第:{}行", i)
             # 删除空行
-            if Feishu.get_range_value("k2rKkv", "A" + str(i) + ":" + "A" + str(i))[0] is None:
+            if Feishu.get_range_value(crawler, sheetid, "A" + str(i) + ":" + "A" + str(i))[0] is None\
+                    and Feishu.get_range_value(crawler, sheetid, "B" + str(i) + ":" + "B" + str(i))[0] is None\
+                    and Feishu.get_range_value(crawler, sheetid, "C" + str(i) + ":" + "C" + str(i))[0] is None\
+                    and Feishu.get_range_value(crawler, sheetid, "D" + str(i) + ":" + "D" + str(i))[0] is None:
                 Common.logger().info("当前第{}行为空行,删除", i)
-                Feishu.dimension_range("k2rKkv", "ROWS", i, i)
+                Feishu.dimension_range(crawler, sheetid, "ROWS", i, i)
         Common.logger().info("删除空行完成")
 
     # 更新小时榜数据
@@ -319,51 +325,55 @@ class HourList:
         更新小时榜数据
         """
         try:
-            if len(Feishu.get_values_batch("k2rKkv")) == 2:
+            if len(Feishu.get_values_batch("xiaoniangao", "ba0da4")) == 2:
                 Common.logger().info("当前工作表无数据")
             else:
-                for i in range(3, len(Feishu.get_values_batch("k2rKkv"))+1):
-                    time.sleep(0.5)
-                    Common.logger().info("更新第:{}条视频信息", i-2)
+                for i in range(3, len(Feishu.get_values_batch("xiaoniangao", "ba0da4"))+1):
+                    time.sleep(1)
+                    Common.logger().info("更新第:{}行视频信息", i)
 
                     # 略过空行
-                    if Feishu.get_range_value("k2rKkv", "D" + str(i) + ":" + "D" + str(i))[0] is None\
-                            or Feishu.get_range_value("k2rKkv", "C" + str(i) + ":" + "C" + str(i))[0] is None\
-                            or Feishu.get_range_value("k2rKkv", "A" + str(i) + ":" + "A" + str(i))[0] is None:
+                    if Feishu.get_range_value("xiaoniangao", "ba0da4", "D" + str(i) + ":" + "D" + str(i))[0] is None\
+                            and Feishu.get_range_value("xiaoniangao", "ba0da4", "C"+str(i)+":"+"C"+str(i))[0] is None\
+                            and Feishu.get_range_value("xiaoniangao", "ba0da4", "A"+str(i)+":"+"A"+str(i))[0] is None:
                         Common.logger().info("空行,略过")
                     else:
                         # 视频标题
-                        v_title = Feishu.get_range_value("k2rKkv", "D" + str(i) + ":" + "D" + str(i))[0]
+                        v_title = Feishu.get_range_value("xiaoniangao", "ba0da4", "D" + str(i) + ":" + "D" + str(i))[0]
                         Common.logger().info("视频详情,video_title:{},{}", v_title, type(v_title))
 
                         # 视频 ID
-                        v_id = Feishu.get_range_value("k2rKkv", "C" + str(i) + ":" + "C" + str(i))[0]
+                        v_id = Feishu.get_range_value("xiaoniangao", "ba0da4", "C" + str(i) + ":" + "C" + str(i))[0]
                         Common.logger().info("视频详情,video_id:{},{}", v_id, type(v_id))
 
                         # profile_id,用户 ID
-                        p_id = Feishu.get_range_value("k2rKkv", "A" + str(i) + ":" + "A" + str(i))[0]
+                        p_id = Feishu.get_range_value("xiaoniangao", "ba0da4", "A" + str(i) + ":" + "A" + str(i))[0]
                         Common.logger().info("视频详情,profile_id:{},{}", p_id, type(p_id))
 
                         # profile_mid
-                        p_mid = Feishu.get_range_value("k2rKkv", "B" + str(i) + ":" + "B" + str(i))[0]
+                        p_mid = Feishu.get_range_value("xiaoniangao", "ba0da4", "B" + str(i) + ":" + "B" + str(i))[0]
                         Common.logger().info("视频详情,profile_mid:{},{}", p_mid, type(p_mid))
 
                         # 抓取时的播放量
-                        v_play_cnt = Feishu.get_range_value("k2rKkv", "I" + str(i) + ":" + "I" + str(i))[0]
+                        v_play_cnt = Feishu.get_range_value(
+                            "xiaoniangao", "ba0da4", "I" + str(i) + ":" + "I" + str(i))[0]
                         Common.logger().info("视频详情,video_play_cnt:{},{}", v_play_cnt, type(v_play_cnt))
 
                         # 抓取时间
-                        v_upload_time = Feishu.get_range_value("k2rKkv", "H" + str(i) + ":" + "H" + str(i))[0]
+                        v_upload_time = Feishu.get_range_value(
+                            "xiaoniangao", "ba0da4", "H" + str(i) + ":" + "H" + str(i))[0]
                         Common.logger().info("视频详情,video_send_time:{},{}", v_upload_time, type(v_upload_time))
 
-                        # 上传时间
+                        # 抓取时间:日期
+                        upload_data = v_upload_time.split(" ")[0]
+                        # 抓取时间:小时
                         upload_hour = v_upload_time.split(" ")[-1].split(":")[0]
 
                         url = "https://kapi.xiaoniangao.cn/profile/get_profile_by_id"
                         headers = {
-                            "x-b3-traceid": "158f6ce5d71337",
-                            "X-Token-Id": "af9c47bb6c942236ff35ee10d355f3b0-1164637358",
-                            "uid": "3d460a1b-ab85-426b-bd80-62029acaa2c0",
+                            "x-b3-traceid": "bd267349bf41b",
+                            "X-Token-Id": "86f6d7cc2b2b6870004df5d16c82aaf3-1185665701",
+                            "uid": "8fde3c6c-c070-4379-bfc4-15c7e85139c9",
                             "content-type": "application/json",
                             "Accept-Encoding": "gzip,compress,br,deflate",
                             "User-Agent": 'Mozilla/5.0 (iPhone; CPU iPhone OS 14_7_1 like Mac OS X)'
@@ -385,8 +395,8 @@ class HourList:
                             "no_follow": True,
                             "vid": v_id,
                             "hot_l1_comment": True,
-                            "token": "451273638af2c8bb90266bcfaf601a68",
-                            "uid": "3d460a1b-ab85-426b-bd80-62029acaa2c0",
+                            "token": "90747742180aeb22c0fe3a3c6a38f3d9",
+                            "uid": "8fde3c6c-c070-4379-bfc4-15c7e85139c9",
                             "proj": "ma",
                             "wx_ver": "8.0.20",
                             "code_ver": "3.62.0",
@@ -407,7 +417,7 @@ class HourList:
                                 },
                                 "pj": "1",
                                 "pf": "2",
-                                "session_id": "1a20f033-3511-4e7b-9b2a-95e5c542461f"
+                                "session_id": "7bcce313-b57d-4305-8d14-6ebd9a1bad29"
                             }
                         }
                         try:
@@ -415,85 +425,195 @@ class HourList:
                             r = requests.post(headers=headers, url=url, json=data, proxies=proxies, verify=False)
                             hour_play_cnt = r.json()["data"]["play_pv"]
                             Common.logger().info("视频详情,当前播放量:{}", hour_play_cnt)
-                            # 固定时间获取符合规则的视频,写入云文档:https://w42nne6hzg.feishu.cn/sheets/shtcngRPoDYAi24x52j2nDuHMih?sheet=k2rKkv
+                            # 固定时间获取符合规则的视频,写入云文档:https://w42nne6hzg.feishu.cn/sheets/shtcngRPoDYAi24x52j2nDuHMih?sheet=ba0da4
                             update_hour = datetime.datetime.now()
-                            if update_hour.hour == 10 and int(upload_hour) <= 10:
-                                Common.logger().info("满足条件: update_hour == 10 and int(upload_hour) < 10")
+                            if upload_data == cls.today and update_hour.hour == 10 and int(upload_hour) <= 10:
+                                Common.logger().info("满足条件: 抓取日期为今天 and 当前时间:10点 and 抓取时间<=10点")
+
                                 # 当天 10:00 视频播放量
                                 ten_hour_play_cnt = hour_play_cnt
                                 Common.logger().info("当天 10:00 视频播放量:{}", ten_hour_play_cnt)
+
                                 # 10:00 的上升榜写入数据
                                 values = int(ten_hour_play_cnt) - int(v_play_cnt)
                                 time.sleep(1)
-                                Feishu.update_hour_list_values("k2rKkv", "J"+str(i) + ":" + "J"+str(i), [[values]])
+                                Feishu.update_values(
+                                    "xiaoniangao", "ba0da4", "J" + str(i) + ":" + "J" + str(i), [[values]])
                                 Common.logger().info("10:00数据更新成功:{}", values)
-                            elif update_hour.hour == 15 and int(upload_hour) <= 10:
-                                Common.logger().info("满足条件: update_hour == 15 and int(upload_hour) <= 10")
+
+                            elif upload_data == cls.today and update_hour.hour == 15 and int(upload_hour) <= 10:
+                                Common.logger().info("满足条件: 抓取日期为今天 and 当前时间:15点 and 抓取时间<=10点")
+
                                 # 当天 15:00 视频播放量
                                 fifteen_hour_play_cnt = hour_play_cnt
                                 Common.logger().info("当天 15:00 视频播放量:{}", fifteen_hour_play_cnt)
+
                                 # 当天 10:00 上升的数据
-                                if Feishu.get_range_value("k2rKkv", "J"+str(i) + ":" + "J"+str(i))[0] is None:
+                                if Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "J"+str(i) + ":" + "J"+str(i))[0] is None:
                                     ten_up_cnt = 0
                                 else:
-                                    ten_up_cnt = Feishu.get_range_value("k2rKkv", "J"+str(i) + ":" + "J"+str(i))[0]
+                                    ten_up_cnt = Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "J"+str(i) + ":" + "J"+str(i))[0]
+
                                 # 15:00 的上升榜写入数据
                                 values = int(fifteen_hour_play_cnt) - (int(v_play_cnt) + int(ten_up_cnt))
                                 time.sleep(1)
-                                Feishu.update_hour_list_values("k2rKkv", "K" + str(i) + ":" + "K" + str(i), [[values]])
+                                Feishu.update_values(
+                                    "xiaoniangao", "ba0da4", "K" + str(i) + ":" + "K" + str(i), [[values]])
                                 Common.logger().info("15:00数据更新成功:{}", values)
-                            elif update_hour.hour == 15 and 10 < int(upload_hour) <= 15:
-                                Common.logger().info("满足条件: update_hour == 15 and 10 < int(upload_hour) < 15")
+
+                            elif upload_data == cls.today and update_hour.hour == 15 and 10 < int(upload_hour) <= 15:
+                                Common.logger().info("满足条件: 抓取日期为今天 and 当前时间:15点 and 10<抓取时间<=15点")
+
                                 # 当天 15:00 视频播放量
                                 fifteen_hour_play_cnt = hour_play_cnt
+                                Common.logger().info("当天 15:00 视频播放量:{}", fifteen_hour_play_cnt)
+
                                 # 15:00 的上升榜写入数据
                                 values = int(fifteen_hour_play_cnt) - int(v_play_cnt)
                                 time.sleep(1)
-                                Feishu.update_hour_list_values("k2rKkv", "K" + str(i) + ":" + "K" + str(i), [[values]])
+                                Feishu.update_values(
+                                    "xiaoniangao", "ba0da4", "K" + str(i) + ":" + "K" + str(i), [[values]])
                                 Common.logger().info("15:00数据更新成功:{}", values)
-                            elif update_hour.hour == 20 and int(upload_hour) <= 10:
-                                Common.logger().info("满足条件: update_hour == 20 and int(upload_hour) <= 10")
+
+                            elif upload_data == cls.today and update_hour.hour == 20 and int(upload_hour) <= 10:
+                                Common.logger().info("满足条件: 抓取日期为今天 and 当前时间:20点 and 抓取时间<=10点")
+
                                 # 当天 20:00 视频播放量
                                 twenty_hour_play_cnt = hour_play_cnt
+                                Common.logger().info("当天 20:00 视频播放量:{}", twenty_hour_play_cnt)
+
                                 # 当天 10:00 上升的数据
-                                if Feishu.get_range_value("k2rKkv", "J" + str(i) + ":" + "J" + str(i))[0] is None:
+                                if Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "J" + str(i) + ":" + "J" + str(i))[0] is None:
                                     ten_up_cnt = 0
                                 else:
-                                    ten_up_cnt = Feishu.get_range_value("k2rKkv", "J" + str(i) + ":" + "J" + str(i))[0]
+                                    ten_up_cnt = Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "J" + str(i) + ":" + "J" + str(i))[0]
+
                                 # 当天 15:00 上升的数据
-                                if Feishu.get_range_value("k2rKkv", "K" + str(i) + ":" + "K" + str(i))[0] is None:
+                                if Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "K" + str(i) + ":" + "K" + str(i))[0] is None:
                                     fifteen_up_cnt = 0
                                 else:
-                                    fifteen_up_cnt = Feishu.get_range_value("k2rKkv", "K" + str(i) + ":" + "K" + str(i))[0]
+                                    fifteen_up_cnt = Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "K" + str(i) + ":" + "K" + str(i))[0]
+
                                 # 20:00 的上升榜写入数据
                                 values = int(twenty_hour_play_cnt) - (
                                         int(v_play_cnt) + int(ten_up_cnt) + int(fifteen_up_cnt))
                                 time.sleep(1)
-                                Feishu.update_hour_list_values("k2rKkv", "L" + str(i) + ":" + "L" + str(i), [[values]])
+                                Feishu.update_values(
+                                    "xiaoniangao", "ba0da4", "L" + str(i) + ":" + "L" + str(i), [[values]])
                                 Common.logger().info("20:00数据更新成功:{}", values)
-                            elif update_hour.hour == 20 and 10 < int(upload_hour) <= 15:
-                                Common.logger().info("满足条件: update_hour == 20 and 10 < int(upload_hour) < 15")
+
+                            elif upload_data == cls.today and update_hour.hour == 20 and 10 < int(upload_hour) <= 15:
+                                Common.logger().info("满足条件: 抓取日期为今天 and 当前时间:20点 and 10<抓取时间<=15点")
+
                                 # 当天 20:00 视频播放量
                                 twenty_hour_play_cnt = hour_play_cnt
+                                Common.logger().info("当天 20:00 视频播放量:{}", twenty_hour_play_cnt)
+
                                 # 当天 15:00 上升的数据
-                                if Feishu.get_range_value("k2rKkv", "K" + str(i) + ":" + "K" + str(i))[0] is None:
+                                if Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "K" + str(i) + ":" + "K" + str(i))[0] is None:
                                     fifteen_up_cnt = 0
                                 else:
-                                    fifteen_up_cnt = Feishu.get_range_value("k2rKkv", "K" + str(i) + ":" + "K" + str(i))[0]
+                                    fifteen_up_cnt = Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "K" + str(i) + ":" + "K" + str(i))[0]
+
                                 # 20:00 的上升榜写入数据
                                 values = int(twenty_hour_play_cnt) - (int(v_play_cnt) + int(fifteen_up_cnt))
                                 time.sleep(1)
-                                Feishu.update_hour_list_values("k2rKkv", "L" + str(i) + ":" + "L" + str(i), [[values]])
+                                Feishu.update_values(
+                                    "xiaoniangao", "ba0da4", "L" + str(i) + ":" + "L" + str(i), [[values]])
                                 Common.logger().info("20:00数据更新成功:{}", values)
-                            elif update_hour.hour == 20 and 15 < int(upload_hour) <= 20:
-                                Common.logger().info("满足条件: update_hour == 20 and 15 < int(upload_hour) < 20")
+
+                            elif upload_data == cls.today and update_hour.hour == 20 and 15 < int(upload_hour) <= 20:
+                                Common.logger().info("满足条件: 抓取日期为今天 and 当前时间:20点 and 15<抓取时间<=20点")
+
                                 # 当天 20:00 视频播放量
                                 twenty_hour_play_cnt = hour_play_cnt
+                                Common.logger().info("当天 20:00 视频播放量:{}", twenty_hour_play_cnt)
+
                                 # 20:00 的上升榜写入数据
                                 values = int(twenty_hour_play_cnt) - int(v_play_cnt)
                                 time.sleep(1)
-                                Feishu.update_hour_list_values("k2rKkv", "L" + str(i) + ":" + "L" + str(i), [[values]])
+                                Feishu.update_values(
+                                    "xiaoniangao", "ba0da4", "L" + str(i) + ":" + "L" + str(i), [[values]])
+                                Common.logger().info("20:00数据更新成功:{}", values)
+
+                            elif (upload_data == cls.yesterday or upload_data == cls.before_yesterday)\
+                                    and update_hour.hour == 10:
+                                Common.logger().info("满足条件: 抓取时间小于今天 and 当前时间:10点")
+
+                                # 当天 10:00 视频播放量
+                                ten_hour_play_cnt = hour_play_cnt
+                                Common.logger().info("当天 10:00 视频播放量:{}", ten_hour_play_cnt)
+
+                                # 10:00 的上升榜写入数据
+                                values = int(ten_hour_play_cnt) - int(v_play_cnt)
+                                time.sleep(1)
+                                Feishu.update_values(
+                                    "xiaoniangao", "ba0da4", "J" + str(i) + ":" + "J" + str(i), [[values]])
+                                Common.logger().info("10:00数据更新成功:{}", values)
+
+                            elif (upload_data == cls.yesterday or upload_data == cls.before_yesterday)\
+                                    and update_hour.hour == 15:
+                                Common.logger().info("满足条件: 抓取时间小于今天 and 当前时间:15点")
+
+                                # 当天 15:00 视频播放量
+                                fifteen_hour_play_cnt = hour_play_cnt
+                                Common.logger().info("当天 15:00 视频播放量:{}", fifteen_hour_play_cnt)
+
+                                # 当天 10:00 上升的数据
+                                if Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "J" + str(i) + ":" + "J" + str(i))[0] is None:
+                                    ten_up_cnt = 0
+                                else:
+                                    ten_up_cnt = Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "J" + str(i) + ":" + "J" + str(i))[0]
+
+                                # 15:00 的上升榜写入数据
+                                values = int(fifteen_hour_play_cnt) - (int(v_play_cnt) + int(ten_up_cnt))
+                                time.sleep(1)
+                                Feishu.update_values(
+                                    "xiaoniangao", "ba0da4", "K" + str(i) + ":" + "K" + str(i), [[values]])
+                                Common.logger().info("15:00数据更新成功:{}", values)
+
+                            elif (upload_data == cls.yesterday or upload_data == cls.before_yesterday)\
+                                    and update_hour.hour == 20:
+                                Common.logger().info("满足条件: 抓取时间小于今天 and 当前时间:20点")
+
+                                # 当天 20:00 视频播放量
+                                twenty_hour_play_cnt = hour_play_cnt
+                                Common.logger().info("当天 20:00 视频播放量:{}", twenty_hour_play_cnt)
+
+                                # 当天 10:00 上升的数据
+                                if Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "J" + str(i) + ":" + "J" + str(i))[0] is None:
+                                    ten_up_cnt = 0
+                                else:
+                                    ten_up_cnt = Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "J" + str(i) + ":" + "J" + str(i))[0]
+
+                                # 当天 15:00 上升的数据
+                                if Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "K" + str(i) + ":" + "K" + str(i))[0] is None:
+                                    fifteen_up_cnt = 0
+                                else:
+                                    fifteen_up_cnt = Feishu.get_range_value(
+                                        "xiaoniangao", "ba0da4", "K" + str(i) + ":" + "K" + str(i))[0]
+
+                                # 20:00 的上升榜写入数据
+                                values = int(twenty_hour_play_cnt) - (
+                                        int(v_play_cnt) + int(ten_up_cnt) + int(fifteen_up_cnt))
+                                time.sleep(1)
+                                Feishu.update_values(
+                                    "xiaoniangao", "ba0da4", "L" + str(i) + ":" + "L" + str(i), [[values]])
                                 Common.logger().info("20:00数据更新成功:{}", values)
+
                         except Exception as e:
                             Common.logger().error("视频详情:{},异常:{}", v_title, e)
         except Exception as e:
@@ -502,6 +622,6 @@ class HourList:
 
 if __name__ == "__main__":
     hour_list = HourList()
-    # hour_list.get_hour_list_feeds()
-    hour_list.del_null_rows()
+    hour_list.get_hour_list_feeds()
+    # hour_list.del_null_rows("xiaoniangao", "ba0da4", 3)
     hour_list.update_hour_list_data()

main/run.py (4 additions, 4 deletions)

@@ -15,7 +15,7 @@ from main.hour_list import HourList
 
 def xiaoniangao_prod_job():
     """
-    执行正式环境快手脚本
+    执行正式环境脚本
     """
     while True:
         # 当天下载及上传的视频数:150 条
@@ -34,7 +34,7 @@ def xiaoniangao_prod_job():
             time.sleep(random.randint(1, 3))
 
         # 删除冗余日志
-        Common.del_logs()
+        Common.del_logs("logs")
 
 
 def main_prod():
@@ -79,5 +79,5 @@ def main_hour_list():
 
 
 if __name__ == "__main__":
-    # main_prod()
-    main_hour_list()
+    main_prod()
+    # main_hour_list()

main/run_hour_list.py (51 additions, 0 deletions — new file)

@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# @Author: wangkun
+# @Time: 2022/5/19
+import datetime
+import os
+import sys
+import time
+sys.path.append(os.getcwd())
+from main.common import Common
+from main.hour_list import HourList
+
+
+def hour_list_job():
+    while True:
+        while True:
+            # 获取符合规则的视频,写入列表
+            HourList.get_hour_list_feeds()
+            time.sleep(1)
+
+            hour_list_job_time = datetime.datetime.now()
+            if hour_list_job_time.hour == 10 and hour_list_job_time.minute <= 10:
+
+                Common.logger().info("检查今日上升榜日期是否存在")
+                HourList.check_hour_list_data()
+
+                Common.logger().info("开始更新上升榜")
+                HourList.update_hour_list_data()
+
+            elif hour_list_job_time.hour == 15 and hour_list_job_time.minute <= 10:
+
+                Common.logger().info("检查今日上升榜日期是否存在")
+                HourList.check_hour_list_data()
+
+                Common.logger().info("开始更新上升榜")
+                HourList.update_hour_list_data()
+
+            elif hour_list_job_time.hour == 20 and hour_list_job_time.minute <= 10:
+
+                Common.logger().info("检查今日上升榜日期是否存在")
+                HourList.check_hour_list_data()
+
+                Common.logger().info("开始更新上升榜")
+                HourList.update_hour_list_data()
+
+            elif hour_list_job_time.hour == 23 and hour_list_job_time.minute >= 55:
+
+                break
+
+
+if __name__ == "__main__":
+    hour_list_job()

main/run_person.py (3 additions, 0 deletions — new file)

@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+# @Author: wangkun
+# @Time: 2022/5/18

person-logs/__init__.py (3 additions, 0 deletions — new file)

@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+# @Author: wangkun
+# @Time: 2022/5/18