zhangyong 4 months ago
parent
commit
cbb5a69cc6
14 changed files with 1046 additions and 0 deletions
  1. 14 0
      Dockerfile
  2. 0 0
      common/__init__.py
  3. 43 0
      common/feishu_form.py
  4. 379 0
      common/feishu_utils.py
  5. 83 0
      common/piaoquan.py
  6. 59 0
      common/redis.py
  7. 23 0
      common/tag_video.py
  8. 240 0
      docker-compose.yml
  9. 5 0
      entrypoint.sh
  10. 53 0
      pq_data_handle.py
  11. 0 0
      pq_video/__init__.py
  12. 102 0
      pq_video/pq_video.py
  13. 34 0
      pq_video_reids.py
  14. 11 0
      requirements.txt

+ 14 - 0
Dockerfile

@@ -0,0 +1,14 @@
+FROM python:3.11-slim
+
+WORKDIR /app
+
+COPY . .
+
+ENV TZ=Asia/Shanghai
+
+RUN apt update && apt --no-install-recommends install -y wget xz-utils nscd libgl-dev libglib2.0-dev fonts-wqy-zenhei \
+    && apt-get clean && rm -rf /var/lib/apt/lists/* \
+    && pip install -r requirements.txt --no-cache-dir \
+    && mkdir -p /app/cache
+
+ENTRYPOINT ["python", "/app/pq_video_reids.py"]

+ 0 - 0
common/__init__.py


+ 43 - 0
common/feishu_form.py

@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+import json
+import os
+import sys
+sys.path.append(os.getcwd())
+from common.feishu_utils import Feishu
+
+
class Material:
    """Fetch per-owner task rows from the Feishu sheet and serialize them.

    Each qualifying row (activation date equal to ``dt``) is converted into a
    JSON string so it can be pushed straight onto a Redis list.
    """

    @classmethod
    def get_carry_data(cls, dt, FS_SHEET, NAME):
        """Return the list of JSON task strings whose activation date is ``dt``.

        :param dt: activation date/hour, int-convertible (e.g. 2024091013)
        :param FS_SHEET: Feishu worksheet id to read
        :param NAME: owner name embedded into every task payload
        :return: list of JSON strings; possibly partial if a row is malformed
        """
        rows = Feishu.get_values_batch("NFHPswCE7haebJt2m4rcBnuUnUc", FS_SHEET)
        processed_list = []
        try:
            # The first two rows are headers; data starts at index 2.
            for row in rows[2:]:
                activate_data = row[5]  # activation date column
                if not activate_data:
                    continue
                if int(activate_data) != int(dt):
                    continue
                number_dict = {
                    "channel_mark": row[0],
                    "name": NAME,
                    "pq_ids": row[1],
                    "pq_label": row[2],   # on-platform tag
                    "activate_data": activate_data,
                    "video_id": row[3],   # on-platform video id
                    "title": row[4],      # title category
                    "dt": dt,
                }
                processed_list.append(json.dumps(number_dict, ensure_ascii=False))
            return processed_list
        except Exception:
            # A malformed row (short row, non-numeric date) aborts the scan;
            # was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit
            # are no longer swallowed. Return what was collected so far.
            return processed_list

+ 379 - 0
common/feishu_utils.py

@@ -0,0 +1,379 @@
+# -*- coding: utf-8 -*-
+# @Time: 2023/12/26
+"""
+飞书表配置: token 鉴权 / 增删改查 / 机器人报警
+"""
+import json
+import os
+import sys
+import requests
+import urllib3
+from loguru import logger
+
+sys.path.append(os.getcwd())
+
+proxies = {"http": None, "https": None}
+
+
class Feishu:
    """
    Feishu spreadsheet helper: token auth / sheet CRUD / bot alerts.
    """
    succinct_url = "https://w42nne6hzg.feishu.cn/sheets/"

    # Map a logical crawler name to its spreadsheet token.
    @classmethod
    def spreadsheettoken(cls, crawler):
        # "summary" is an alias for the shared summary spreadsheet; any other
        # value is assumed to already be a raw spreadsheet token.
        if crawler == "summary":
            return "KsoMsyP2ghleM9tzBfmcEEXBnXg"
        else:
            return crawler
+
+
+
    # Obtain a Feishu API token
    @classmethod
    def get_token(cls):
        """
        Fetch a tenant_access_token from the Feishu open API.
        :return: token string, or None if the request failed (error is logged)
        """
        url = "https://open.feishu.cn/open-apis/auth/v3/tenant_access_token/internal/"
        # NOTE(review): app credentials are hard-coded; consider moving them
        # to environment variables or a secret store.
        post_data = {"app_id": "cli_a13ad2afa438d00b",  # id/secret of the published app's backend
                     "app_secret": "4tK9LY9VbiQlY5umhE42dclBFo6t4p5O"}

        try:
            urllib3.disable_warnings()
            response = requests.post(url=url, data=post_data, proxies=proxies, verify=False)
            tenant_access_token = response.json()["tenant_access_token"]
            return tenant_access_token
        except Exception as e:
            logger.error(f"[+] 飞书获取飞书 api token 异常:{e}")
+
+
    # Fetch spreadsheet metadata
    @classmethod
    def get_metainfo(cls, crawler):
        """
        Fetch metadata of the spreadsheet identified by ``crawler``.
        :param crawler: logical sheet name or raw spreadsheet token
        :return: decoded JSON response dict, or None on error (logged)
        """
        try:
            get_metainfo_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" \
                               + cls.spreadsheettoken(crawler) + "/metainfo"

            headers = {
                "Authorization": "Bearer " + cls.get_token(),
                "Content-Type": "application/json; charset=utf-8"
            }
            params = {
                "extFields": "protectedRange",  # extFields=protectedRange also returns protected row/column info
                "user_id_type": "open_id"  # user id type in the response: open_id or union_id
            }
            urllib3.disable_warnings()
            r = requests.get(url=get_metainfo_url, headers=headers, params=params, proxies=proxies, verify=False)
            response = json.loads(r.content.decode("utf8"))
            return response
        except Exception as e:
            logger.error(f"[+] 飞书获取表格元数据异常:{e}")
+
    # Read every value of one worksheet
    @classmethod
    def get_values_batch(cls, crawler, sheetid):
        """
        Read all rows of a worksheet.
        :param crawler: logical sheet name or raw spreadsheet token
        :param sheetid: worksheet (tab) id
        :return: list of row lists, or None on error (logged)
        """
        try:
            get_values_batch_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" \
                                   + cls.spreadsheettoken(crawler) + "/values_batch_get"
            headers = {
                "Authorization": "Bearer " + cls.get_token(),
                "Content-Type": "application/json; charset=utf-8"
            }
            params = {
                "ranges": sheetid,
                # Render every cell as a string so callers see plain text.
                "valueRenderOption": "ToString",
                "dateTimeRenderOption": "",
                "user_id_type": "open_id"
            }
            urllib3.disable_warnings()
            r = requests.get(url=get_values_batch_url, headers=headers, params=params, proxies=proxies, verify=False)
            response = json.loads(r.content.decode("utf8"))
            values = response["data"]["valueRanges"][0]["values"]
            return values
        except Exception as e:
            logger.error(f"[+] 飞书读取工作表所有数据异常:{e}")
+
    # Insert rows or columns into a worksheet
    @classmethod
    def insert_columns(cls, crawler, sheetid, majordimension, startindex, endindex):
        """
        Insert rows or columns.
        :param crawler: logical sheet name or raw spreadsheet token
        :param sheetid: worksheet id
        :param majordimension: "ROWS" or "COLUMNS"
        :param startindex: start position
        :param endindex: end position
        """
        try:
            insert_columns_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" \
                                 + cls.spreadsheettoken(crawler) + "/insert_dimension_range"
            headers = {
                "Authorization": "Bearer " + cls.get_token(),
                "Content-Type": "application/json; charset=utf-8"
            }
            body = {
                "dimension": {
                    "sheetId": sheetid,
                    "majorDimension": majordimension,  # default ROWS; ROWS or COLUMNS
                    "startIndex": startindex,  # start position
                    "endIndex": endindex  # end position
                },
                "inheritStyle": "AFTER"  # BEFORE or AFTER; omit to not inherit style
            }

            urllib3.disable_warnings()
            # Response is intentionally ignored; failures are only logged.
            r = requests.post(url=insert_columns_url, headers=headers, json=body, proxies=proxies, verify=False)
        except Exception as e:
            logger.error(f"[+] 飞书插入行或列异常:{e}")
+
    # Write values into a worksheet range
    @classmethod
    def update_values(cls, crawler, sheetid, ranges, values):
        """
        Write data into a worksheet range.
        :param crawler: logical sheet name or raw spreadsheet token
        :param sheetid: worksheet id
        :param ranges: cell range, e.g. "A2:Z2"
        :param values: list of row lists to write
        """
        try:
            update_values_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" \
                                + cls.spreadsheettoken(crawler) + "/values_batch_update"
            headers = {
                "Authorization": "Bearer " + cls.get_token(),
                "Content-Type": "application/json; charset=utf-8"
            }
            body = {
                "valueRanges": [
                    {
                        "range": sheetid + "!" + ranges,
                        "values": values
                    },
                ],
            }
            urllib3.disable_warnings()
            # Response is intentionally ignored; failures are only logged.
            r = requests.post(url=update_values_url, headers=headers, json=body, proxies=proxies, verify=False)
        except Exception as e:
            logger.error(f"[+] 飞书写入数据异常:{e}")
+
    # Read one cell range
    @classmethod
    def get_range_value(cls, crawler, sheetid, cell):
        """
        Read the content of a single cell range.
        :param crawler: logical sheet name or raw spreadsheet token
        :param sheetid: worksheet id
        :param cell: cell range, e.g. "B3:B3"
        :return: first row of values, or None on error (logged)
        """
        try:
            get_range_value_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" \
                                  + cls.spreadsheettoken(crawler) + "/values/" + sheetid + "!" + cell
            headers = {
                "Authorization": "Bearer " + cls.get_token(),
                "Content-Type": "application/json; charset=utf-8"
            }
            params = {
                "valueRenderOption": "FormattedValue",

                # dateTimeRenderOption=FormattedString formats date/time cells
                # (not plain numbers) and returns the formatted string.
                "dateTimeRenderOption": "",

                # user id type in the response: open_id or union_id
                "user_id_type": "open_id"
            }
            urllib3.disable_warnings()
            r = requests.get(url=get_range_value_url, headers=headers, params=params, proxies=proxies, verify=False)
            return r.json()["data"]["valueRange"]["values"][0]
        except Exception as e:
            logger.error(f"[+] 飞书读取单元格数据异常:{e}")
+    # 获取表内容
+    @classmethod
+    def get_sheet_content(cls, crawler, sheet_id):
+        try:
+            sheet = Feishu.get_values_batch(crawler, sheet_id)
+            content_list = []
+            for x in sheet:
+                for y in x:
+                    if y is None:
+                        pass
+                    else:
+                        content_list.append(y)
+            return content_list
+        except Exception as e:
+            logger.error(f"[+] 飞书get_sheet_content:{e}")
+
    # Delete rows or columns ("ROWS" or "COLUMNS")
    @classmethod
    def dimension_range(cls, crawler, sheetid, major_dimension, startindex, endindex):
        """
        Delete rows or columns.
        :param crawler: logical sheet name or raw spreadsheet token
        :param sheetid: worksheet id
        :param major_dimension: default ROWS; ROWS or COLUMNS
        :param startindex: start position
        :param endindex: end position
        :return: None (failures are only logged)
        """
        try:
            dimension_range_url = "https://open.feishu.cn/open-apis/sheets/v2/spreadsheets/" \
                                  + cls.spreadsheettoken(crawler) + "/dimension_range"
            headers = {
                "Authorization": "Bearer " + cls.get_token(),
                "Content-Type": "application/json; charset=utf-8"
            }
            body = {
                "dimension": {
                    "sheetId": sheetid,
                    "majorDimension": major_dimension,
                    "startIndex": startindex,
                    "endIndex": endindex
                }
            }
            urllib3.disable_warnings()
            r = requests.delete(url=dimension_range_url, headers=headers, json=body, proxies=proxies, verify=False)
        except Exception as e:
            logger.error(f"[+] 飞书删除视频数据异常:{e}")
+
    # Resolve a username to a Feishu open_id
    @classmethod
    def get_userid(cls, username):
        """Map an internal username to a Feishu open_id via phone-number lookup.

        :param username: key into the in-code name->phone roster below
        :return: open_id string, or None on error (logged)
        """
        try:
            url = "https://open.feishu.cn/open-apis/user/v1/batch_get_id?"
            headers = {
                "Authorization": "Bearer " + cls.get_token(),
                "Content-Type": "application/json; charset=utf-8"
            }
            # Internal roster: username -> mobile number used for the lookup.
            name_phone_dict = {
                "xinxin": "15546206651",
                "muxinyi": "13699208058",
                "wangxueke": "13513479926",
                "yuzhuoyi": "18624010360",
                "luojunhui": "18801281360",
                "fanjun": "15200827642",
                "zhangyong": "17600025055",
                'liukunyu': "18810931977"
            }
            # After this line `username` holds the phone number (or None for
            # an unknown user, in which case the lookup below will fail).
            username = name_phone_dict.get(username)

            data = {"mobiles": [username]}
            urllib3.disable_warnings()
            r = requests.get(url=url, headers=headers, params=data, verify=False, proxies=proxies)
            open_id = r.json()["data"]["mobile_users"][username][0]["open_id"]

            return open_id
        except Exception as e:
            logger.error(f"[+] 飞书get_userid异常:{e}")
+
    # Feishu alert bot
    @classmethod
    def bot(cls, log_type, crawler, text, mark_name):
        """
        Send an interactive alert card to a Feishu group webhook.
        :param log_type: username key (or iterable of keys) used for @-mentions
        :param crawler: channel name; selects the webhook URL and sheet link
        :param text: markdown body appended after the mention(s)
        :param mark_name: display name (or iterable of names) for the mention(s)
        """
        try:

            headers = {'Content-Type': 'application/json'}
            if crawler == "机器自动改造消息通知":
                url = "https://open.feishu.cn/open-apis/bot/v2/hook/e7697dc6-5254-4411-8b59-3cd0742bf703"
                sheet_url = "https://w42nne6hzg.feishu.cn/sheets/KsoMsyP2ghleM9tzBfmcEEXBnXg?sheet=bc154d"
                users = f"<at id=" + str(cls.get_userid(log_type)) + f">{mark_name}</at>"
            elif crawler == "快手关键词搜索":
                url = "https://open.feishu.cn/open-apis/bot/v2/hook/e7697dc6-5254-4411-8b59-3cd0742bf703"
                sheet_url = "https://w42nne6hzg.feishu.cn/sheets/KsoMsyP2ghleM9tzBfmcEEXBnXg?sheet=U1gySe"
                # Mention several people: log_type/mark_name are parallel lists.
                # NOTE(review): the loop variable `type` shadows the builtin.
                users = "".join([f'<at id="{cls.get_userid(type)}">{name}</at>' for type, name in
                                 zip(log_type, mark_name)])
            else:
                url = "https://open.feishu.cn/open-apis/bot/v2/hook/7928f182-08c1-4c4d-b2f7-82e10c93ca80"
                sheet_url = "https://w42nne6hzg.feishu.cn/sheets/KsoMsyP2ghleM9tzBfmcEEXBnXg?sheet=bc154d"
                users = f"<at id=" + str(cls.get_userid(log_type)) + f">{mark_name}</at>"
            # Interactive card: mention + text body, a link button, and a header.
            data = json.dumps({
                "msg_type": "interactive",
                "card": {
                    "config": {
                        "wide_screen_mode": True,
                        "enable_forward": True
                    },
                    "elements": [{
                        "tag": "div",
                        "text": {
                            "content": users + text,
                            "tag": "lark_md"
                        }
                    }, {
                        "actions": [{
                            "tag": "button",
                            "text": {
                                "content": "详情,点击~~~~~",
                                "tag": "lark_md"
                            },
                            "url": sheet_url,
                            "type": "default",
                            "value": {}
                        }],
                        "tag": "action"
                    }],
                    "header": {
                        "title": {
                            "content": "📣消息提醒",
                            "tag": "plain_text"
                        }
                    }
                }
            })
            urllib3.disable_warnings()
            r = requests.post(url, headers=headers, data=data, verify=False, proxies=proxies)
        except Exception as e:
            logger.error(f"[+] 飞书bot异常:{e}")
+
+
+    # 飞书机器人-改造计划完成通知
+    @classmethod
+    def finish_bot(cls, text, url, content):
+        try:
+            headers = {'Content-Type': 'application/json'}
+            data = json.dumps({
+                "msg_type": "interactive",
+                "card": {
+                    "config": {
+                        "wide_screen_mode": True,
+                        "enable_forward": True
+                    },
+                    "elements": [{
+                        "tag": "div",
+                        "text": {
+                            "content": text,
+                            "tag": "lark_md"
+                        }
+                    }],
+                    "header": {
+                        "title": {
+                            "content": content,
+                            "tag": "plain_text"
+                        }
+                    }
+                }
+            })
+            urllib3.disable_warnings()
+            r = requests.post(url, headers=headers, data=data, verify=False, proxies=proxies)
+        except Exception as e:
+            logger.error(f"[+] 飞书bot异常:{e}")
+
if __name__ == "__main__":
    # Manual smoke test. Feishu.bot takes four arguments; the original call
    # passed only three and raised TypeError before any request was sent.
    Feishu.bot('recommend', '抖音', '测试: 抖音cookie失效,请及时更换', '测试')

+ 83 - 0
common/piaoquan.py

@@ -0,0 +1,83 @@
+
+import requests
+from urllib.parse import urlencode
+import json
+
+
+
class PQ:

    """
    Piaoquan backend helpers: publish a video under an account, and look up
    a video's OSS paths and title.
    """
    @classmethod
    def insert_piaoquantv(cls, new_video_path, new_title, n_id, cover_path):
        """Publish a video under account ``n_id``.

        :param new_video_path: OSS path of the video file
        :param new_title: title of the new post
        :param n_id: target Piaoquan account (login uid)
        :param cover_path: OSS path of the cover image
        :return: new video id on success, otherwise None
        """
        url = "https://vlogapi.piaoquantv.com/longvideoapi/crawler/video/send?muid=999"
        # NOTE(review): cookies / tokens / device ids below are hard-coded
        # session artifacts; uploads will break silently when they expire.
        headers = {
            'User-Agent': 'PQSpeed/486 CFNetwork/1410.1 Darwin/22.6.0',
            'cookie': 'JSESSIONID=4DEA2B5173BB9A9E82DB772C0ACDBC9F; JSESSIONID=D02C334150025222A0B824A98B539B78',
            'referer': 'http://appspeed.piaoquantv.com',
            'token': '524a8bc871dbb0f4d4717895083172ab37c02d2f',
            'accept-language': 'zh-CN,zh-Hans;q=0.9',
            'Content-Type': 'application/x-www-form-urlencoded'
        }
        payload = {
            'deviceToken': '9ef064f2f7869b3fd67d6141f8a899175dddc91240971172f1f2a662ef891408',
            'fileExtensions': 'MP4',
            'loginUid': n_id,
            'networkType': 'Wi-Fi',
            'platform': 'iOS',
            'requestId': 'fb972cbd4f390afcfd3da1869cd7d001',
            'sessionId': '362290597725ce1fa870d7be4f46dcc2',
            'subSessionId': '362290597725ce1fa870d7be4f46dcc2',
            'title': new_title,
            'token': '524a8bc871dbb0f4d4717895083172ab37c02d2f',
            'uid': n_id,
            'versionCode': '486',
            'versionName': '3.4.12',
            'videoFromScene': '1',
            'videoPath': new_video_path,
            'viewStatus': '1',
            'coverImgPath' : cover_path
        }
        encoded_payload = urlencode(payload)
        response = requests.request("POST", url, headers=headers, data=encoded_payload)
        data = response.json()
        code = data["code"]
        if code == 0:
            new_video_id = data["data"]["id"]
            print(new_video_id)  # debug output of the newly created video id
            return new_video_id
        return None

    @classmethod
    def get_pq_oss(cls, video_id):
        """Look up a video's OSS video path, cover path and title.

        Retries the request up to 3 times; returns (None, None, None) when
        the backend keeps returning a non-zero code or the request raises.
        """
        try:
            url = "https://longvideoapi.piaoquantv.com/longvideoapi/openapi/video/getBaseInfo"

            payload = json.dumps({
                "videoId": int(video_id)
            })
            headers = {
                'Content-Type': 'application/json',
                # NOTE(review): hard-coded session cookie; may expire.
                'Cookie': 'JSESSIONID=658158EABFCF6AC9B9BB0D8B61897A88'
            }
            for i in range(3):
                response = requests.request("POST", url, headers=headers, data=payload)
                response = response.json()
                code = response['code']
                if code == 0:
                    data = response['data']
                    video_path = data["videoPath"]
                    cover_path = data["coverImgPath"]
                    title = data["title"]
                    return video_path, cover_path, title
            return None, None, None
        except Exception as e:
            return None, None, None
+
+
+
+
if __name__ == '__main__':
    # Manual smoke test: re-upload a known OSS video/cover under account 76773417.
    PQ.insert_piaoquantv("carry/video/4a5d3820-d95c-4676-a451-8521e2a54f6e","这个视频,分享给我的老友,祝愿您能幸福安康","76773417","carry/snapshot/4a5d3820-d95c-4676-a451-8521e2a54f6e_0")

+ 59 - 0
common/redis.py

@@ -0,0 +1,59 @@
+import redis
+
+from common.feishu_form import Material
+
+
class SyncRedisHelper:
    """Shared Redis connection-pool holder.

    The original ``_instance`` check in ``__init__`` never made this a
    singleton: ``_instance`` stayed a class attribute set to an arbitrary
    instance, and ``self._pool = ...`` created a *new* instance-level pool for
    every helper object. The pool is now cached on the class and ``__new__``
    returns a single shared instance, so all callers reuse one pool.
    """

    _pool: redis.ConnectionPool = None
    _instance = None

    def __new__(cls):
        # Real singleton: always hand back the same helper object.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        # Idempotent: the pool is created once and cached on the class.
        if SyncRedisHelper._pool is None:
            SyncRedisHelper._pool = self._get_pool()

    def _get_pool(self) -> redis.ConnectionPool:
        """Create (once) and return the class-level connection pool."""
        if SyncRedisHelper._pool is None:
            SyncRedisHelper._pool = redis.ConnectionPool(
                host="r-bp1mb0v08fqi4hjffupd.redis.rds.aliyuncs.com",  # public endpoint
                # host="r-bp1mb0v08fqi4hjffu.redis.rds.aliyuncs.com",  # internal endpoint
                port=6379,
                db=0,
                password="Wqsd@2019",  # NOTE(review): credential hard-coded in source
                # password="Qingqu2019",
            )
        return SyncRedisHelper._pool

    def get_client(self) -> redis.Redis:
        """Return a Redis client backed by the shared pool."""
        return redis.Redis(connection_pool=self._get_pool())

    def close(self):
        """Disconnect every connection (including in-use ones) in the pool."""
        if SyncRedisHelper._pool:
            SyncRedisHelper._pool.disconnect(inuse_connections=True)
+
def insert_carry_data(dt, REDIS_NAME, FS_SHEET, NAME):
    """Load tasks for hour ``dt`` from the Feishu sheet and push them onto
    the Redis list ``REDIS_NAME``.

    :return: number of tasks pushed (0 when the sheet produced nothing)
    """
    data = Material.get_carry_data(dt, FS_SHEET, NAME)
    if not data:
        return 0
    helper = SyncRedisHelper()
    client = helper.get_client()
    client.rpush(REDIS_NAME, *data)
    return len(data)
+
def get_carry_data(REDIS_NAME):
    """Pop one pending task from the head of the list.

    :return: raw task payload (bytes) or None when the list is empty
    """
    helper = SyncRedisHelper()
    client = helper.get_client()
    ret = client.lpop(REDIS_NAME)
    return ret
+
def in_carry_video_data(REDIS_NAME, ret):
    """Re-queue a task that failed to fetch/process (pushed to the tail)."""
    helper = SyncRedisHelper()
    client = helper.get_client()
    client.rpush(REDIS_NAME, str(ret))
+
+

+ 23 - 0
common/tag_video.py

@@ -0,0 +1,23 @@
+import requests
+import json
+
class Tag:
    """Tagging helper for the Piaoquan admin backend."""

    @classmethod
    def video_tag(cls, pq_id: str, tag: str):
        """Attach comma-separated ``tag`` names to video ``pq_id``.

        :return: backend response code (0 on success), or 1 when the request
                 itself failed (network/JSON error)
        """
        try:
            url = "https://admin.piaoquantv.com/manager/video/tag/addVideoTags"

            payload = json.dumps({
                "videoId": pq_id,
                "tagNames": tag
            })
            headers = {
                'Content-Type': 'application/json'
            }

            response = requests.request("POST", url, headers=headers, data=payload)
            response = response.json()
            code = response['code']
            return code
        except Exception:
            # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit
            # propagate. Any request failure maps to a generic non-zero code.
            return 1

+ 240 - 0
docker-compose.yml

@@ -0,0 +1,240 @@
+services:
+  worker1:
+    build:
+      context: .
+      dockerfile: Dockerfile
+    image: pq
+    container_name: pq_worker1
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=a0a5dc
+      - TASK_TYPE=redis
+      - NAME=范军
+      - REDIS_NAME=task:pq_redis_fj
+    networks:
+      - carry_net
+  worker2:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker2
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=wYYNCk
+      - TASK_TYPE=redis
+      - NAME=鲁涛
+      - REDIS_NAME=task:pq_redis_lt
+    networks:
+      - carry_net
+  worker3:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker3
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=Sij2y3
+      - TASK_TYPE=redis
+      - NAME=余海涛
+      - REDIS_NAME=task:pq_redis_yht
+    networks:
+      - carry_net
+  worker4:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker4
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=0JPBgt
+      - TASK_TYPE=redis
+      - NAME=罗情
+      - REDIS_NAME=task:pq_redis_lq
+    networks:
+      - carry_net
+  worker5:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker5
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=TlBJeE
+      - TASK_TYPE=redis
+      - NAME=刘诗雨
+      - REDIS_NAME=task:pq_redis_lsy
+    networks:
+      - carry_net
+  worker7:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker7
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=e5fmtM
+      - TASK_TYPE=redis
+      - NAME=周仙琴
+      - REDIS_NAME=task:pq_redis_zxq
+    networks:
+      - carry_net
+  worker8:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker8
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=a0a5dc
+      - TASK_TYPE=redis
+      - NAME=范军
+      - REDIS_NAME=task:pq_redis_fj
+#    volumes:
+#      - "./pq_handle.py:/app/pq_handle.py"
+    networks:
+      - carry_net
+    entrypoint: "./entrypoint.sh"
+  worker9:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker9
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=wYYNCk
+      - TASK_TYPE=redis
+      - NAME=鲁涛
+      - REDIS_NAME=task:pq_redis_lt
+    networks:
+      - carry_net
+    entrypoint: "./entrypoint.sh"
+  worker10:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker10
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=Sij2y3
+      - TASK_TYPE=redis
+      - NAME=余海涛
+      - REDIS_NAME=task:pq_redis_yht
+    networks:
+      - carry_net
+    entrypoint: "./entrypoint.sh"
+  worker11:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker11
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=0JPBgt
+      - TASK_TYPE=redis
+      - NAME=罗情
+      - REDIS_NAME=task:pq_redis_lq
+    networks:
+      - carry_net
+    entrypoint: "./entrypoint.sh"
+  worker12:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker12
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=TlBJeE
+      - TASK_TYPE=redis
+      - NAME=刘诗雨
+      - REDIS_NAME=task:pq_redis_lsy
+    networks:
+      - carry_net
+    entrypoint: "./entrypoint.sh"
+  worker14:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker14
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=e5fmtM
+      - TASK_TYPE=redis
+      - NAME=周仙琴
+      - REDIS_NAME=task:pq_redis_zxq
+    networks:
+      - carry_net
+    entrypoint: "./entrypoint.sh"
+  worker15:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker15
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=qx3zBD
+      - TASK_TYPE=redis
+      - NAME=王雪珂
+      - REDIS_NAME=task:pq_redis_wxk
+    networks:
+      - carry_net
+  worker16:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker16
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=bxsgNP
+      - TASK_TYPE=redis
+      - NAME=信欣
+      - REDIS_NAME=task:pq_redis_xx
+    networks:
+      - carry_net
+  worker17:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker17
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=qx3zBD
+      - TASK_TYPE=redis
+      - NAME=王雪珂
+      - REDIS_NAME=task:pq_redis_wxk
+    networks:
+      - carry_net
+    entrypoint: "./entrypoint.sh"
+  worker18:
+    depends_on:
+      - worker1
+    image: pq
+    container_name: pq_worker18
+    restart: unless-stopped
+    environment:
+      - ENV=prod
+      - FS_SHEET=bxsgNP
+      - TASK_TYPE=redis
+      - NAME=信欣
+      - REDIS_NAME=task:pq_redis_xx
+    networks:
+      - carry_net
+    entrypoint: "./entrypoint.sh"
+networks:
+  carry_net:
+    name: carry_net
+

+ 5 - 0
entrypoint.sh

@@ -0,0 +1,5 @@
+#!/bin/sh
+
+service nscd start
+
+python /app/pq_data_handle.py

+ 53 - 0
pq_data_handle.py

@@ -0,0 +1,53 @@
+import json
+import os
+import time
+import uuid
+
+import schedule
+from loguru import logger
+
+from common.feishu_utils import Feishu
+from common.redis import get_carry_data, in_carry_video_data
+from pq_video.pq_video import PqViode
+
+ENV = os.getenv('ENV', 'dev')
+NAME = os.getenv('NAME')
+REDIS_NAME = os.getenv('REDIS_NAME')
+CACHE_DIR = '/app/cache/' if ENV == 'prod' else os.path.expanduser('~/Downloads/')
+
def video_task_start():
    """Drain the Redis task list; failed tasks are re-queued and reported."""
    logger.info(f"[+] {REDIS_NAME}任务开始redis获取")
    while True:
        data = get_carry_data(REDIS_NAME)
        if not data:
            # Queue exhausted; return until the next scheduled run.
            return
        try:
            logger.info(f"[+] {NAME}任务开始,数据为{data}")
            carry_video = PqViode()
            # NOTE(review): PqViode.main always returns None, so `mark`
            # is only useful for the debug print below.
            mark = carry_video.main(json.loads(data), REDIS_NAME)
            print(f"返回用户名: {mark}")
            logger.info(f"[+] {NAME}处理一条成功")
            continue
        except Exception as e:
            # Push the task back for retry, then alert the owner via Feishu.
            data = json.loads(data)
            in_carry_video_data(REDIS_NAME, json.dumps(data, ensure_ascii=False, indent=4))
            text = (
                f"**负责人**: {data['name']}\n"
                f"**内容**: {data}\n"
                f"**失败信息**: 站内视频重发失败,等待重新处理\n"
            )
            Feishu.finish_bot(text,
                              "https://open.feishu.cn/open-apis/bot/v2/hook/65bc5463-dee9-46d0-bc2d-ec6c49a8f3cd",
                              "【 站内视频重发失败通知 】")
            logger.error(f"[+] {data}处理失败,失败信息{e}")
            continue
+
def schedule_tasks():
    # Register the queue-draining job to run every 6 minutes.
    schedule.every(6).minutes.do(video_task_start)
+
if __name__ == '__main__':
    schedule_tasks()  # register the periodic job
    while True:
        schedule.run_pending()
        time.sleep(1)  # poll the scheduler once per second
    # video_task_start()

+ 0 - 0
pq_video/__init__.py


+ 102 - 0
pq_video/pq_video.py

@@ -0,0 +1,102 @@
+import json
+import re
+import time
+from datetime import datetime
+
+from loguru import logger
+
+from common.feishu_utils import Feishu
+from common.piaoquan import PQ
+from common.redis import in_carry_video_data
+from common.tag_video import Tag
+
+
class PqViode:
    """Re-publishes an existing in-site video under one or more Piaoquan accounts.

    NOTE(review): class name looks like a typo for ``PqVideo``; kept as-is
    because callers import ``PqViode``.
    """

    def main(self, data, REDIS_NAME):
        """Process one task dict popped from Redis.

        :param data: task payload from Material.get_carry_data (keys:
                     video_id, title, pq_ids, pq_label, name, dt, ...)
        :param REDIS_NAME: Redis list name, used for logging and re-queueing
        :return: None (results are reported via Feishu and logs)
        """
        video_id = data['video_id']
        title = data['title']
        if not video_id:
            return
        logger.info(f"[+] {REDIS_NAME}的{video_id}开始获取视频地址")

        # Resolve OSS paths and the original title of the source video.
        video_path, cover_path, old_title = PQ.get_pq_oss(video_id)
        if not title:
            # No title category configured: reuse the source video's title.
            title = old_title
        if not video_path:
            # Lookup failed; push the task back for a later retry.
            in_carry_video_data(REDIS_NAME, json.dumps(data, ensure_ascii=False, indent=4))
            logger.error(f"[+] {REDIS_NAME}的{video_id}没有获取到视频地址,等待重新处理")
            return
        n_ids = str(data["pq_ids"])
        # pq_ids may be one account id or a comma-separated list of ids.
        if ',' in n_ids:
            n_id_list = n_ids.split(',')
        else:
            n_id_list = [n_ids]
        pq_list = []
        for n_id in n_id_list:
            # Re-publish the video under account n_id; returns the new video id.
            code = PQ.insert_piaoquantv(video_path, title, n_id, cover_path)
            if not code:
                logger.error(f"[+] {REDIS_NAME}的{data}写入票圈后台失败")
                text = (
                    f"**负责人**: {data['name']}\n"
                    f"**内容**: {data}\n"
                    f"**失败信息**: 视频写入票圈后台失败,视频ID{code}\n"
                )
                Feishu.finish_bot(text,
                                  "https://open.feishu.cn/open-apis/bot/v2/hook/65bc5463-dee9-46d0-bc2d-ec6c49a8f3cd",
                                  "【 搬运&改造效率工具失败通知 】")
                continue

            pq_list.append(code)
            logger.info(f"[+] {REDIS_NAME}的{data}写入票圈成功,返回视频id{code}")
            # Tag the new video; filter(None, ...) drops empty labels.
            tags = ','.join(filter(None, [
                data['pq_label'],
                "站内重发"
            ]))
            tag_status = Tag.video_tag(code, str(tags))
            if tag_status == 0:
                logger.info(f"[+] {REDIS_NAME}的{data}写入标签成功,后台视频ID为{code}")
            try:
                current_time = datetime.now()
                formatted_time = current_time.strftime("%Y-%m-%d %H:%M:%S")

                pq_url = f'https://admin.piaoquantv.com/cms/post-detail/{code}/detail'  # in-site video link

                values = [
                    [
                        str(code),
                        str(n_id),
                        formatted_time,
                        "站内重发",
                        data["pq_label"],
                        data["video_id"],
                        data["title"],
                        data["dt"],
                        pq_url
                    ]
                ]
                # Owner display name -> result worksheet id.
                name_to_sheet = {
                    "范军": "bbf2a9",
                    "鲁涛": "UpArtI",
                    "余海涛": "wh13NV",
                    "罗情": "8hhW6y",
                    "刘诗雨": "OVfeDx",
                    "周仙琴": "3zQn2q",
                    "王雪珂": "qR49hQ",
                    "信欣": "G3i38u"
                }
                # Strip whitespace so the lookup tolerates padded names.
                name = re.sub(r"\s+", "", data.get("name", ""))
                sheet = name_to_sheet.get(name)
                # Insert a fresh row at the top, then write the record into it.
                Feishu.insert_columns("RyXxsjECmhmCFGt0x0mci1iBnmg", sheet, "ROWS", 1, 2)
                time.sleep(0.5)
                Feishu.update_values("RyXxsjECmhmCFGt0x0mci1iBnmg", sheet, "A2:Z2", values)
                logger.info(f"[+] {REDIS_NAME}的{data}写入飞书成功")
            except Exception as e:
                # Sheet bookkeeping is best-effort; the upload already succeeded.
                logger.error(f"[+] {REDIS_NAME}的{data}写入飞书失败{e}")
                pass

        return

+ 34 - 0
pq_video_reids.py

@@ -0,0 +1,34 @@
+import datetime
+import os
+import time
+
+import schedule
+from loguru import logger
+from common.redis import  insert_carry_data
+
+ENV = os.getenv('ENV', 'dev')
+FS_SHEET = os.getenv('FS_SHEET')
+NAME = os.getenv('NAME')
+REDIS_NAME = os.getenv('REDIS_NAME')
+
def bot_carry_data():
    """Pull this hour's task rows from Feishu and push them into Redis."""
    # Compute dt before entering the try block: the except handler formats
    # `dt` into its log message, and in the original code a failure before
    # the assignment would have raised NameError inside the handler.
    dt = int(datetime.datetime.now().strftime('%Y%m%d%H'))
    try:
        logger.info(f"[+] 开始获取{NAME},时区为{dt}")
        count = insert_carry_data(dt, REDIS_NAME, FS_SHEET, NAME)
        logger.info(f"[+] {NAME},时区为{dt}共获取{count}条")

    except Exception as e:
        logger.error(f"[+] 获取{NAME},时区为{dt}失败,失败信息{e}")
+
def schedule_tasks():
    # Register the Feishu->Redis sync to run at 5 minutes past every hour.
    schedule.every().hour.at(":05").do(bot_carry_data)
+
+
if __name__ == "__main__":
    schedule_tasks()  # register the hourly job
    while True:
        schedule.run_pending()
        time.sleep(1)  # poll the scheduler once per second
    # bot_carry_data()
+

+ 11 - 0
requirements.txt

@@ -0,0 +1,11 @@
+aliyun-log-python-sdk==0.9.12
+google-generativeai==0.8.3
+loguru==0.7.2
+mutagen==1.47.0
+odps==3.5.1
+opencv-python==4.10.0.84
+oss2==2.19.1
+redis==5.1.1
+requests==2.32.3
+schedule==1.2.2
+pymysql==1.0.2