# -*- coding: utf-8 -*-
# @Time: 2023/10/26
# Kanyikan (WeChat 看一看) recommend-feed crawler: pulls the recommendation list,
# checks each item against share/play ratio, download rules, filter words and
# de-duplication, logs every candidate to a Feishu sheet, and pushes qualifying
# videos to the ETL MQ topic.
import json
import os
import random
import sys
import time
from datetime import datetime

import requests
import urllib3

sys.path.append(os.getcwd())
from common.mq import MQ
from common.common import Common
from common.scheduling_db import MysqlHelper
from common import AliyunLogger
from common.public import get_config_from_mysql, download_rule
from common.feishu import Feishu

proxies = {"http": None, "https": None}


class KanyikanRecommend:
    platform = "看一看-plus"
    strategy = "随机数据抓取"

    @classmethod
    def repeat_video(cls, log_type, crawler, video_id, env):
        # Number of rows already crawled for this out_video_id (0 means not seen yet).
        sql = f""" select * from crawler_video where platform in ("{crawler}","{cls.platform}") and create_time>='2023-10-09' and out_video_id="{video_id}"; """
        repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env)
        return len(repeat_video)

    @classmethod
    def insert_video_id(cls, log_type, crawler, video_id, env):
        insert_sql = f"""insert into crawler_kyk_video_id( kyk_video_id , status) values ("{video_id}",0)"""
        MysqlHelper.update_values(log_type, crawler, insert_sql, env, action='')

    @classmethod
    def get_videoList(cls, log_type, crawler, our_uid, rule_dict, env):
        mq = MQ(topic_name="topic_crawler_etl_" + env)
        try:
            session = Common.get_session(log_type, crawler, env)
            if session is None:
                time.sleep(1)
                cls.get_videoList(log_type, crawler, our_uid, rule_dict, env)
                return  # the recursive call already handles the retry
            sharesearchid = 0
            for i in range(20):
                url = 'https://search.weixin.qq.com/cgi-bin/recwxa/recwxavideolist?'
                vid = random.choice([
                    "wxv_3183841422983217154", "wxv_2930758110737334272", "wxv_2988109621326512134",
                    "wxv_2676332817823432706", "wxv_3176172124915433476", "wxv_2844480939899650049",
                    "wxv_2801905452978274308", "wxv_2946787506342117382", "wxv_2935943471797125120",
                    "wxv_2756464139115659264", "wxv_3174430452460453896", "wxv_3126758748858908674",
                    "wxv_3182262442043621385", "wxv_3058491263710314497", "wxv_2952726055449051140",
                    "wxv_3076106053748015108", "wxv_2074265064492040192", "wxv_2999570992006021122"
                ])
                channelid = random.choice(["200201", "200", "208", "208201"])
                switchnewuser = random.choice(["0", "1"])
                isFromUgc = random.choice(["false", "true"])
                switchprofile = random.choice(["0", "1"])
                subscene = random.choice(["1089", "1074", "208", "1007", "1008"])
                # Randomly pick one of two request-parameter templates to vary the feed.
                params = random.choice([{
                    'session': session,
                    "offset": 0,
                    "wxaVersion": "3.17.12",
                    "count": "10",
                    "channelid": channelid,
                    "scene": '310',
                    "subscene": subscene,
                    "clientVersion": '3.8.6',
                    "sharesearchid": sharesearchid,
                    "nettype": 'wifi',
                    "switchprofile": switchprofile,
                    "switchnewuser": switchnewuser,
                }, {
                    "session": session,
                    "wxaVersion": "3.17.8",
                    "channelid": channelid,
                    "vid": vid,
                    "offset": 0,
                    "count": "15",
                    "scene": '310',
                    "subscene": subscene,
                    "model": "华为",
                    "nettype": '4g',
                    "clientVersion": '3.8.6',
                    "sharesearchid": sharesearchid,
                    "presearchid": "17530764723864413041",
                    "sharesource": "0",
                    "isFromUgc": isFromUgc,
                    "ad": 0,
                    "switchprofile": switchprofile,
                    "switchnewuser": switchnewuser,
                }])
                header = {
                    'Host': 'search.weixin.qq.com',
                    'Content-Type': 'application/json',
                    'X-WX-ClientVersion': '0x33050520',
                    'X-WECHAT-UIN': 'b2hfbTQ1WGNjSzQxemdfanpMSml1TEtfbEtsVQ==',
                    'Accept': '*/*',
                    'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 11_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E217 MicroMessenger/6.8.0(0x16080000) NetType/WIFI Language/en Branch/Br_trunk MiniProgramEnv/Mac',
                    'Referer': 'https://servicewechat.com/wxbb9a805eb4f9533c/268/page-frame.html',
                    'Accept-Language': 'zh-cn'
                }
                urllib3.disable_warnings()
                response = requests.get(url=url, headers=header, params=params, proxies=proxies, verify=False)
                # print(response)
                if "data" not in response.text:
                    Common.logger(log_type, crawler).info("获取视频list时,session过期,随机睡眠 31-50 秒")
                    Common.logging(log_type, crawler, env, "获取视频list时,session过期,随机睡眠 31-50 秒")
                    AliyunLogger.logging(
                        code="2000",
                        platform=crawler,
                        mode=log_type,
                        env=env,
                        message="获取视频list时,session过期,随机睡眠 31-50 秒"
                    )
                    # Session expired: sleep a random 31-40 seconds, then restart the crawl.
                    time.sleep(random.randint(31, 40))
                    cls.get_videoList(log_type, crawler, our_uid, rule_dict, env)
                    return
                elif "items" not in response.json()["data"]:
                    Common.logger(log_type, crawler).info(f"get_feeds:{response.json()},随机睡眠 1-3 分钟")
                    Common.logging(log_type, crawler, env, f"get_feeds:{response.json()},随机睡眠 1-3 分钟")
                    AliyunLogger.logging(
                        code="2000",
                        platform=crawler,
                        mode=log_type,
                        env=env,
                        message=f"get_feeds:{response.json()},随机睡眠 1-3 分钟"
                    )
                    # Empty feed list: sleep a random 1-3 minutes, then restart the crawl.
                    time.sleep(random.randint(60, 180))
                    cls.get_videoList(log_type, crawler, our_uid, rule_dict, env)
                    return
                feeds = response.json().get("data", {}).get("items", "")
                sharesearchid = response.json().get("searchid", {})
                if feeds == "":
                    Common.logger(log_type, crawler).info(f"feeds:{feeds}")
                    Common.logging(log_type, crawler, env, f"feeds:{feeds}")
                    return
                for i in range(len(feeds)):
                    try:
                        AliyunLogger.logging(
                            code="1001",
                            platform=crawler,
                            mode=log_type,
                            env=env,
                            message='扫描到一条视频\n'
                        )
                        # Strip characters that are illegal in file names / sheet cells.
                        video_title = feeds[i].get("title", "").strip().replace("\n", "") \
                            .replace("/", "").replace("\\", "").replace("\r", "") \
                            .replace(":", "").replace("*", "").replace("?", "") \
                            .replace("?", "").replace('"', "").replace("<", "") \
                            .replace(">", "").replace("|", "").replace(" ", "") \
                            .replace("&NBSP", "").replace(".", "。").replace(" ", "") \
                            .replace("'", "").replace("#", "").replace("Merge", "")
                        publish_time_stamp = feeds[i].get("date", 0)
                        publish_time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time_stamp))
                        # Resolve the playback URL from the CDN info.
                        if "videoInfo" not in feeds[i]:
                            video_url = ""
                        elif "mpInfo" in feeds[i]["videoInfo"]["videoCdnInfo"]:
                            if len(feeds[i]["videoInfo"]["videoCdnInfo"]["mpInfo"]["urlInfo"]) > 2:
                                video_url = feeds[i]["videoInfo"]["videoCdnInfo"]["mpInfo"]["urlInfo"][2]["url"]
                            else:
                                video_url = feeds[i]["videoInfo"]["videoCdnInfo"]["mpInfo"]["urlInfo"][0]["url"]
                        elif "ctnInfo" in feeds[i]["videoInfo"]["videoCdnInfo"]:
                            video_url = feeds[i]["videoInfo"]["videoCdnInfo"]["ctnInfo"]["urlInfo"][0]["url"]
                        else:
                            video_url = feeds[i]["videoInfo"]["videoCdnInfo"]["urlInfo"][0]["url"]
                        video_id = feeds[i].get("videoId", "")
                        videoId = "{}kyk_plus".format(video_id)
                        playCount = int(feeds[i].get("playCount", 0))
                        shared_cnt = int(feeds[i].get("shared_cnt", 0))
                        video_dict = {
                            "video_title": video_title,
                            "video_id": videoId,
                            "play_cnt": feeds[i].get("playCount", 0),
                            "like_cnt": feeds[i].get("liked_cnt", 0),
                            "comment_cnt": feeds[i].get("comment_cnt", 0),
                            "share_cnt": feeds[i].get("shared_cnt", 0),
                            "duration": feeds[i].get("mediaDuration", 0),
                            "video_width": feeds[i].get("short_video_info", {}).get("width", 0),
                            "video_height": feeds[i].get("short_video_info", {}).get("height", 0),
                            "publish_time_stamp": publish_time_stamp,
                            "publish_time_str": publish_time_str,
                            "user_name": feeds[i].get("source", "").strip().replace("\n", ""),
                            "user_id": feeds[i].get("openid", ""),
                            "avatar_url": feeds[i].get("bizIcon", ""),
                            "cover_url": feeds[i].get("thumbUrl", ""),
                            "video_url": video_url,
                            "session": session,
                        }
                        # Current time, used as the crawl timestamp written to the Feishu sheet.
                        current_time = datetime.now()
                        formatted_time = current_time.strftime("%Y-%m-%d %H:%M:%S")
                        for k, v in video_dict.items():
                            Common.logger(log_type, crawler).info(f"{k}:{v}")
                        Common.logging(log_type, crawler, env, f"video_dict:{video_dict}")
                        AliyunLogger.logging(
                            code="1000",
                            platform=crawler,
                            mode=log_type,
                            env=env,
                            message=f"{video_dict}\n"
                        )
                        # Share/play ratio gate; guard against division by zero when playCount is 0.
                        video_percent = '%.2f' % (shared_cnt / playCount) if playCount else '0.00'
                        if float(video_percent) < 0.1:
                            Common.logger(log_type, crawler).info(f"分享/播放:{video_percent}\n")
                            Common.logging(log_type, crawler, env, f"分享/播放:{video_percent}\n")
                            AliyunLogger.logging(
                                code="2004",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message=f"不符合抓取条件,分享/播放:{video_percent}\n"
                            )
                            values = [[
                                videoId,
                                video_title,
                                feeds[i].get("playCount", 0),
                                feeds[i].get("liked_cnt", 0),
                                feeds[i].get("comment_cnt", 0),
                                feeds[i].get("shared_cnt", 0),
                                feeds[i].get("mediaDuration", 0),
                                publish_time_str,
                                formatted_time,
                                feeds[i].get("thumbUrl", ""),
                                video_url,
                                f"channelid:{channelid},switchnewuser:{switchnewuser},sharesearchid:{sharesearchid},isFromUgc:{isFromUgc},switchprofile:{switchprofile},subscene:{subscene}",
                                "否",
                                f"不符合抓取条件,分享/播放:{video_percent}"
                            ]]
                            Feishu.insert_columns('kanyikan', 'kanyikan', "zS0vxs", "ROWS", 1, 2)
                            time.sleep(0.5)
                            Feishu.update_values('kanyikan', 'kanyikan', "zS0vxs", "A2:Z2", values)
                            continue
                        if video_dict["video_id"] == "" or video_dict["video_title"] == "" or video_dict["video_url"] == "":
                            Common.logger(log_type, crawler).info("无效视频\n")
                            Common.logging(log_type, crawler, env, "无效视频\n")
                            AliyunLogger.logging(
                                code="2004",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message="无效视频"
                            )
                            values = [[
                                videoId,
                                video_title,
                                feeds[i].get("playCount", 0),
                                feeds[i].get("liked_cnt", 0),
                                feeds[i].get("comment_cnt", 0),
                                feeds[i].get("shared_cnt", 0),
                                feeds[i].get("mediaDuration", 0),
                                publish_time_str,
                                formatted_time,
                                feeds[i].get("thumbUrl", ""),
                                video_url,
                                f"channelid:{channelid},switchnewuser:{switchnewuser},sharesearchid:{sharesearchid},isFromUgc:{isFromUgc},switchprofile:{switchprofile},subscene:{subscene}",
                                "否",
                                "无效视频"
                            ]]
                            Feishu.insert_columns('kanyikan', 'kanyikan', "zS0vxs", "ROWS", 1, 2)
                            time.sleep(0.5)
                            Feishu.update_values('kanyikan', 'kanyikan', "zS0vxs", "A2:Z2", values)
                        elif download_rule(log_type=log_type, crawler=crawler, video_dict=video_dict, rule_dict=rule_dict) is False:
                            Common.logger(log_type, crawler).info("不满足抓取规则\n")
                            Common.logging(log_type, crawler, env, "不满足抓取规则\n")
                            AliyunLogger.logging(
                                code="2004",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message='不满足抓取规则\n'
                            )
                            values = [[
                                videoId,
                                video_title,
                                feeds[i].get("playCount", 0),
                                feeds[i].get("liked_cnt", 0),
                                feeds[i].get("comment_cnt", 0),
                                feeds[i].get("shared_cnt", 0),
                                feeds[i].get("mediaDuration", 0),
                                publish_time_str,
                                formatted_time,
                                feeds[i].get("thumbUrl", ""),
                                video_url,
                                f"channelid:{channelid},switchnewuser:{switchnewuser},sharesearchid:{sharesearchid},isFromUgc:{isFromUgc},switchprofile:{switchprofile},subscene:{subscene}",
                                "否",
                                "不满足抓取规则"
                            ]]
                            Feishu.insert_columns('kanyikan', 'kanyikan', "zS0vxs", "ROWS", 1, 2)
                            time.sleep(0.5)
                            Feishu.update_values('kanyikan', 'kanyikan', "zS0vxs", "A2:Z2", values)
                        elif any(str(word) in video_dict["video_title"]
                                 for word in get_config_from_mysql(log_type=log_type, source=crawler, env=env, text="filter", action="")):
                            Common.logger(log_type, crawler).info('已中过滤词\n')
                            Common.logging(log_type, crawler, env, '已中过滤词\n')
                            AliyunLogger.logging(
                                code="2004",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message='已中过滤词\n'
                            )
                            values = [[
                                videoId,
                                video_title,
                                feeds[i].get("playCount", 0),
                                feeds[i].get("liked_cnt", 0),
                                feeds[i].get("comment_cnt", 0),
                                feeds[i].get("shared_cnt", 0),
                                feeds[i].get("mediaDuration", 0),
                                publish_time_str,
                                formatted_time,
                                feeds[i].get("thumbUrl", ""),
                                video_url,
                                f"channelid:{channelid},switchnewuser:{switchnewuser},sharesearchid:{sharesearchid},isFromUgc:{isFromUgc},switchprofile:{switchprofile},subscene:{subscene}",
                                "否",
                                "已中过滤词"
                            ]]
                            Feishu.insert_columns('kanyikan', 'kanyikan', "zS0vxs", "ROWS", 1, 2)
                            time.sleep(0.5)
                            Feishu.update_values('kanyikan', 'kanyikan', "zS0vxs", "A2:Z2", values)
                        elif cls.repeat_video(log_type, crawler, video_dict["video_id"], env) != 0:
                            Common.logger(log_type, crawler).info('视频已下载\n')
                            Common.logging(log_type, crawler, env, '视频已下载\n')
                            AliyunLogger.logging(
                                code="2002",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message='视频已下载\n'
                            )
                            values = [[
                                videoId,
                                video_title,
                                feeds[i].get("playCount", 0),
                                feeds[i].get("liked_cnt", 0),
                                feeds[i].get("comment_cnt", 0),
                                feeds[i].get("shared_cnt", 0),
                                feeds[i].get("mediaDuration", 0),
                                publish_time_str,
                                formatted_time,
                                feeds[i].get("thumbUrl", ""),
                                video_url,
                                f"channelid:{channelid},switchnewuser:{switchnewuser},sharesearchid:{sharesearchid},isFromUgc:{isFromUgc},switchprofile:{switchprofile},subscene:{subscene}",
                                "否",
                                "视频已下载"
                            ]]
                            Feishu.insert_columns('kanyikan', 'kanyikan', "zS0vxs", "ROWS", 1, 2)
                            time.sleep(0.5)
                            Feishu.update_values('kanyikan', 'kanyikan', "zS0vxs", "A2:Z2", values)
                        else:
                            video_dict["out_user_id"] = video_dict["user_id"]
                            video_dict["platform"] = crawler
                            video_dict["strategy"] = log_type
                            video_dict["strategy_type"] = "data"
                            video_dict["out_video_id"] = video_dict["video_id"]
                            video_dict["width"] = video_dict["video_width"]
                            video_dict["height"] = video_dict["video_height"]
                            video_dict["crawler_rule"] = json.dumps(rule_dict)
                            video_dict["user_id"] = our_uid
                            video_dict["publish_time"] = video_dict["publish_time_str"]
                            cls.insert_video_id(log_type, crawler, video_id, env)
                            AliyunLogger.logging(
                                code="1010",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message=f"看一看video_id:{video_id}入库",
                            )
                            values = [[
                                videoId,
                                video_title,
                                feeds[i].get("playCount", 0),
                                feeds[i].get("liked_cnt", 0),
                                feeds[i].get("comment_cnt", 0),
                                feeds[i].get("shared_cnt", 0),
                                feeds[i].get("mediaDuration", 0),
                                publish_time_str,
                                formatted_time,
                                feeds[i].get("thumbUrl", ""),
                                video_url,
                                f"channelid:{channelid},switchnewuser:{switchnewuser},sharesearchid:{sharesearchid},isFromUgc:{isFromUgc},switchprofile:{switchprofile},subscene:{subscene}",
                                "是",
                                ""
                            ]]
                            Feishu.insert_columns('kanyikan', 'kanyikan', "zS0vxs", "ROWS", 1, 2)
                            time.sleep(0.5)
                            Feishu.update_values('kanyikan', 'kanyikan', "zS0vxs", "A2:Z2", values)
                            mq.send_msg(video_dict)
                            time.sleep(random.randint(10, 15))
                    except Exception as e:
                        Common.logger(log_type, crawler).error(f"抓取单条视频异常:{e}\n")
                        Common.logging(log_type, crawler, env, f"抓取单条视频异常:{e}\n")
                        AliyunLogger.logging(
                            code="3000",
                            platform=crawler,
                            mode=log_type,
                            env=env,
                            message=f"抓取单条视频异常:{e}\n"
                        )
        except Exception as e:
            Common.logger(log_type, crawler).error(f"抓取列表页时异常:{e}\n")
            Common.logging(log_type, crawler, env, f"抓取列表页时异常:{e}\n")
            AliyunLogger.logging(
                code="3000",
                platform=crawler,
                mode=log_type,
                env=env,
                message=f"抓取列表页时异常:{e}\n"
            )


if __name__ == "__main__":
    KanyikanRecommend.get_videoList(
        log_type="recommend",
        crawler="kanyikan",
        env="prod",
        rule_dict={'share_cnt': {'min': 300, 'max': 0}},
        our_uid=64080779
    )