zhangyong · 1 month ago · commit d38ecbec78
3 changed files with 83 additions and 60 deletions
  1. requirements.txt (+2 -1)
  2. utils/feishu_utils.py (+66 -47)
  3. workers/consumption_work.py (+15 -12)

+ 2 - 1
requirements.txt

@@ -10,4 +10,5 @@ requests==2.32.3
 schedule==1.2.2
 pymysql==1.0.2
 orjson==3.10.13
-apscheduler==3.11.0
+apscheduler==3.11.0
+urllib3==2.3.0
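
The new urllib3 pin backs the explicit urllib3.disable_warnings() calls in utils/feishu_utils.py. A minimal sketch of scoping that suppression to the one warning verify=False actually triggers (an alternative to the blanket call, not what this commit does):

    import urllib3

    # Silence only InsecureRequestWarning (raised by verify=False requests)
    # rather than every urllib3 warning category.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)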

+ 66 - 47
utils/feishu_utils.py

@@ -4,14 +4,10 @@
 Feishu sheet configuration: token auth / CRUD / bot alerts
 """
 import json
-import os
-import sys
 import requests
 import urllib3
 from loguru import logger
 
-sys.path.append(os.getcwd())
-
 proxies = {"http": None, "https": None}
 
 
@@ -40,15 +36,10 @@ class Feishu:
         url = "https://open.feishu.cn/open-apis/auth/v3/tenant_access_token/internal/"
         post_data = {"app_id": "cli_a13ad2afa438d00b",  # 这里账号密码是发布应用的后台账号及密码
                      "app_secret": "4tK9LY9VbiQlY5umhE42dclBFo6t4p5O"}
-
-        try:
-            urllib3.disable_warnings()
-            response = requests.post(url=url, data=post_data, proxies=proxies, verify=False)
-            tenant_access_token = response.json()["tenant_access_token"]
-            return tenant_access_token
-        except Exception as e:
-            logger.error(f"[+] 飞书获取飞书 api token 异常:{e}")
-
+        urllib3.disable_warnings()
+        response = requests.post(url=url, data=post_data, proxies=proxies, verify=False)
+        tenant_access_token = response.json()["tenant_access_token"]
+        return tenant_access_token
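
With the try/except removed, a failed token request now raises at the call site instead of returning None. A minimal sketch of handling that in a hypothetical caller:

    # Hypothetical caller: get_token() now propagates request/JSON errors,
    # so the caller decides whether to retry, alert, or re-raise.
    try:
        token = Feishu.get_token()
    except Exception as e:
        logger.error("get_token failed: {}", e)
        raise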
 
     # Get spreadsheet metadata
     @classmethod
@@ -74,7 +65,7 @@ class Feishu:
             response = json.loads(r.content.decode("utf8"))
             return response
         except Exception as e:
-            logger.error(f"[+] 飞书获取表格元数据异常:{e}")
+            logger.error("获取表格元数据异常:{}", e)
 
     # Read all data from a worksheet
     @classmethod
@@ -85,26 +76,25 @@ class Feishu:
         :param sheetid: which sheet
         :return: all values
         """
-        try:
-            get_values_batch_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" \
-                                   + cls.spreadsheettoken(crawler) + "/values_batch_get"
-            headers = {
-                "Authorization": "Bearer " + cls.get_token(),
-                "Content-Type": "application/json; charset=utf-8"
-            }
-            params = {
-                "ranges": sheetid,
-                "valueRenderOption": "ToString",
-                "dateTimeRenderOption": "",
-                "user_id_type": "open_id"
-            }
-            urllib3.disable_warnings()
-            r = requests.get(url=get_values_batch_url, headers=headers, params=params, proxies=proxies, verify=False)
-            response = json.loads(r.content.decode("utf8"))
-            values = response["data"]["valueRanges"][0]["values"]
-            return values
-        except Exception as e:
-            logger.error(f"[+] 飞书读取工作表所有数据异常:{e}")
+
+        get_values_batch_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" \
+                               + cls.spreadsheettoken(crawler) + "/values_batch_get"
+        headers = {
+            "Authorization": "Bearer " + cls.get_token(),
+            "Content-Type": "application/json; charset=utf-8"
+        }
+        params = {
+            "ranges": sheetid,
+            "valueRenderOption": "ToString",
+            "dateTimeRenderOption": "",
+            "user_id_type": "open_id"
+        }
+        urllib3.disable_warnings()
+        r = requests.get(url=get_values_batch_url, headers=headers, params=params, proxies=proxies, verify=False)
+        response = json.loads(r.content.decode("utf8"))
+        values = response["data"]["valueRanges"][0]["values"]
+        return values
+
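
For reference, a sketch of the response shape this method indexes into, reconstructed from the keys used above (other fields omitted; placeholder values):

    # values_batch_get returns one valueRange per requested range;
    # "values" is a list of rows, each row a list of cell strings.
    response = {
        "data": {
            "valueRanges": [
                {"range": "<sheetid>!A1:B2", "values": [["a1", "b1"], ["a2", "b2"]]}
            ]
        }
    }
    values = response["data"]["valueRanges"][0]["values"]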
 
     # Worksheet: insert rows or columns
     @classmethod
@@ -136,9 +126,9 @@ class Feishu:
             }
 
             urllib3.disable_warnings()
-            r = requests.post(url=insert_columns_url, headers=headers, json=body, proxies=proxies, verify=False, timeout=10)
+            r = requests.post(url=insert_columns_url, headers=headers, json=body, proxies=proxies, verify=False)
         except Exception as e:
-            logger.error(f"[+] 飞书插入行或列异常:{e}")
+            logger.error("插入行或列异常:{}", e)
 
     # Write data
     @classmethod
@@ -167,9 +157,36 @@ class Feishu:
                 ],
             }
             urllib3.disable_warnings()
-            r = requests.post(url=update_values_url, headers=headers, json=body, proxies=proxies, verify=False, timeout=10)
+            r = requests.post(url=update_values_url, headers=headers, json=body, proxies=proxies, verify=False)
         except Exception as e:
-            logger.error(f"[+] 飞书写入数据异常:{e}")
+            logger.error("写入数据异常:{}", e)
+
+    # Merge cells
+    @classmethod
+    def merge_cells(cls, crawler, sheetid, ranges):
+        """
+        Merge cells
+        :param crawler: which crawler
+        :param sheetid: which worksheet
+        :param ranges: the cell range to merge
+        """
+        try:
+            merge_cells_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" \
+                              + cls.spreadsheettoken(crawler) + "/merge_cells"
+            headers = {
+                "Authorization": "Bearer " + cls.get_token(),
+                "Content-Type": "application/json; charset=utf-8"
+            }
+
+            body = {
+                "range": sheetid + "!" + ranges,
+                "mergeType": "MERGE_ROWS"
+            }
+            urllib3.disable_warnings()
+            r = requests.post(url=merge_cells_url, headers=headers, json=body, proxies=proxies, verify=False)
+        except Exception as e:
+            logger.error("合并单元格异常:{}", e)
 
     # Read cell data
     @classmethod
@@ -199,10 +216,11 @@ class Feishu:
                 "user_id_type": "open_id"
             }
             urllib3.disable_warnings()
-            r = requests.get(url=get_range_value_url, headers=headers, params=params, proxies=proxies, verify=False, timeout=10)
+            r = requests.get(url=get_range_value_url, headers=headers, params=params, proxies=proxies, verify=False)
+            # logger.error(r.text)
             return r.json()["data"]["valueRange"]["values"][0]
         except Exception as e:
-            logger.error(f"[+] 飞书读取单元格数据异常:{e}")
+            logger.error("读取单元格数据异常:{}", e)
     # Get sheet content
     @classmethod
     def get_sheet_content(cls, crawler, sheet_id):
@@ -217,11 +235,11 @@ class Feishu:
                         content_list.append(y)
             return content_list
         except Exception as e:
-            logger.error(f"[+] 飞书get_sheet_content:{e}")
+            logger.error(f'get_sheet_content:{e}\n')
 
     # Delete rows or columns; major_dimension can be ROWS or COLUMNS
     @classmethod
-    def dimension_range(cls, crawler, sheetid, major_dimension, startindex, endindex):
+    def dimension_range(cls, log_type, crawler, sheetid, major_dimension, startindex, endindex):
         """
         Delete rows or columns
         :param log_type: log path
@@ -250,7 +268,7 @@ class Feishu:
             urllib3.disable_warnings()
             r = requests.delete(url=dimension_range_url, headers=headers, json=body, proxies=proxies, verify=False)
         except Exception as e:
-            logger.error(f"[+] 飞书删除视频数据异常:{e}")
+            logger.error("删除视频数据异常:{}", e)
 
     # Get user ID
     @classmethod
@@ -280,7 +298,8 @@ class Feishu:
 
             return open_id
         except Exception as e:
-            logger.error(f"[+] 飞书get_userid异常:{e}")
+            pass
+            # logger.error(f"get_userid异常:{e}\n")
 
     # Feishu bot
     @classmethod
@@ -339,8 +358,7 @@ class Feishu:
             urllib3.disable_warnings()
             r = requests.post(url, headers=headers, data=data, verify=False, proxies=proxies)
         except Exception as e:
-            logger.error(f"[+] 飞书bot异常:{e}")
-
+            logger.error(f"bot异常:{e}\n")
 
     # Feishu bot: transformation plan completion notification
     @classmethod
@@ -372,7 +390,8 @@ class Feishu:
             urllib3.disable_warnings()
             r = requests.post(url, headers=headers, data=data, verify=False, proxies=proxies)
         except Exception as e:
-            logger.error(f"[+] 飞书bot异常:{e}")
+            logger.error(f"bot异常:{e}\n")
+
 
 if __name__ == "__main__":
     Feishu.bot('recommend', '抖音', 'Test: Douyin cookie expired, please replace it promptly')

+ 15 - 12
workers/consumption_work.py

@@ -117,9 +117,9 @@ class ConsumptionRecommend(object):
                     task['channel'],
                     task['channel_url'],
                     str(video['video_id']),
-                    n_id,
+                    str(n_id),
                     video['old_title'],
-                    task['ai_title'] if task['ai_title'] in ["原标题", "AI标题"] else "",
+                    "AI标题",
                     title,
                     str(code),
                     formatted_time,
@@ -127,22 +127,25 @@ class ConsumptionRecommend(object):
                     explain,
                     voice,
                     task['first_category'],
-                    task.get('secondary_category',''),
+                    "",
+                    # task.get('secondary_category',''),
                     task['keyword_name'],
                     pq_url
                 ]
             ]
             if fs_channel_name == "抖音品类账号":
-                sheet = "ZixHmf"
+                sheet = "905313"
             elif fs_channel_name == "快手品类账号":
-                sheet = "ibjoMx"
-            elif fs_channel_name == "抖音关键词搜索" or fs_channel_name == "快手关键词搜索":
-                sheet = "rBAJT8"
+                sheet = "JHVpNK"
+            elif fs_channel_name == "抖音关键词搜索":
+                sheet = "6nclDV"
+            elif fs_channel_name == "快手关键词搜索":
+                sheet = "PVd8nj"
             elif fs_channel_name == "快手小程序":
-                sheet = "GeDT6Q"
-            Feishu.insert_columns("ILb4sa0LahddRktnRipcu2vQnLb", sheet, "ROWS", 1, 2)
+                sheet = "dEjDt1"
+            Feishu.insert_columns("L2KGsz5HzhDfyYtV9IRcLHjtnsg", sheet, "ROWS", 1, 2)
             time.sleep(0.5)
-            Feishu.update_values("ILb4sa0LahddRktnRipcu2vQnLb", sheet, "A2:Z2", values)
+            Feishu.update_values("L2KGsz5HzhDfyYtV9IRcLHjtnsg", sheet, "A2:Z2", values)
             logger.info(f"[处理] 写入飞书成功")
         except Exception as e:
             logger.error(f"[处理] 写入飞书失败{e}")
@@ -336,7 +339,7 @@ class ConsumptionRecommend(object):
         file_path = os.path.join(CACHE_DIR, uid)
         logger.info(f"[机器改造] 开始获取redis数据")
         fs_data = os.getenv("FS_DATA")
-        fs_data = '快手关键词搜索,ks-gjc,B65Gs3bCHhIzj0t7KGWcwZD0nGf,91wp7k,AIzaSyBiwivvBKfqDsxvqAKBrCZyk-wFMhfthXg'
+        # fs_data = '快手关键词搜索,ks-gjc,B65Gs3bCHhIzj0t7KGWcwZD0nGf,91wp7k,AIzaSyBiwivvBKfqDsxvqAKBrCZyk-wFMhfthXg'
         try:
             fs_data_list = fs_data.split(',')
             fs_channel_name = fs_data_list[0]
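
With the hardcoded override commented out, FS_DATA must now come from the environment. Judging by the sample value, it is a comma-separated record whose first field is the channel name; the remaining fields are not read in this hunk, so the layout below is inferred:

    import os

    # Placeholder values; field layout inferred from the commented-out sample:
    # channel_name, channel_code, spreadsheet_token, sheet_id, api_key
    os.environ["FS_DATA"] = "快手关键词搜索,ks-gjc,<spreadsheet_token>,<sheet_id>,<api_key>"
    fs_channel_name = os.environ["FS_DATA"].split(",")[0]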
@@ -412,4 +415,4 @@ def run():
 
 
 if __name__ == '__main__':
-    ConsumptionRecommend.run()
+    run()