import json
import time

import requests

from common import Common, AliyunLogger, Feishu, Material
from common.sql_help import sqlCollect
from common.userAgent import get_random_user_agent


class KsPcKeyword:
    @classmethod
    def get_key_word(cls, keyword, task_mark, mark, channel_id, name, task):
        """Search Kuaishou's PC GraphQL endpoint for videos matching `keyword`,
        filter them by view count, like/view ratio and duration, and return the
        candidates that have not been processed yet."""
        video_list = []
        url = "https://www.kuaishou.com/graphql"

        payload = json.dumps({
            "operationName": "visionSearchPhoto",
            "variables": {
                "keyword": keyword,
                "pcursor": "",
                "page": "search"
            },
            "query": "fragment photoContent on PhotoEntity {\n __typename\n id\n duration\n caption\n originCaption\n likeCount\n viewCount\n commentCount\n realLikeCount\n coverUrl\n photoUrl\n photoH265Url\n manifest\n manifestH265\n videoResource\n coverUrls {\n url\n __typename\n }\n timestamp\n expTag\n animatedCoverUrl\n distance\n videoRatio\n liked\n stereoType\n profileUserTopPhoto\n musicBlocked\n riskTagContent\n riskTagUrl\n}\n\nfragment recoPhotoFragment on recoPhotoEntity {\n __typename\n id\n duration\n caption\n originCaption\n likeCount\n viewCount\n commentCount\n realLikeCount\n coverUrl\n photoUrl\n photoH265Url\n manifest\n manifestH265\n videoResource\n coverUrls {\n url\n __typename\n }\n timestamp\n expTag\n animatedCoverUrl\n distance\n videoRatio\n liked\n stereoType\n profileUserTopPhoto\n musicBlocked\n riskTagContent\n riskTagUrl\n}\n\nfragment feedContent on Feed {\n type\n author {\n id\n name\n headerUrl\n following\n headerUrls {\n url\n __typename\n }\n __typename\n }\n photo {\n ...photoContent\n ...recoPhotoFragment\n __typename\n }\n canAddComment\n llsid\n status\n currentPcursor\n tags {\n type\n name\n __typename\n }\n __typename\n}\n\nquery visionSearchPhoto($keyword: String, $pcursor: String, $searchSessionId: String, $page: String, $webPageArea: String) {\n visionSearchPhoto(keyword: $keyword, pcursor: $pcursor, searchSessionId: $searchSessionId, page: $page, webPageArea: $webPageArea) {\n result\n llsid\n webPageArea\n feeds {\n ...feedContent\n __typename\n }\n searchSessionId\n pcursor\n aladdinBanner {\n imgUrl\n link\n __typename\n }\n __typename\n }\n}\n"
        })
        cookie = Material.get_cookie_data("KsoMsyP2ghleM9tzBfmcEEXBnXg", "U1gySe", "快手搜索-cookie")
        headers = {
            'Accept-Language': 'zh-CN,zh;q=0.9',
            'Cache-Control': 'no-cache',
            'Connection': 'keep-alive',
            'Origin': 'https://www.kuaishou.com',
            'Pragma': 'no-cache',
            'User-Agent': get_random_user_agent("pc"),
            'accept': '*/*',
            'content-type': 'application/json',
            'Cookie': cookie
        }
        try:
            time.sleep(3)  # throttle requests
            # Proxy settings
            proxy = "http://spkbt3wnzw:cx6R=v5mQuBgqsQ4o7@cn.visitxiangtan.com:30000"
            proxies = {
                "http": proxy,
                "https": proxy
            }
            response = requests.post(url, headers=headers, data=payload, proxies=proxies)
            text = response.text
            if text:
                response_dict = json.loads(text)
                # A top-level "result" field means the request was rejected,
                # which usually indicates an expired cookie.
                if response_dict.get('result'):
                    log_type = ['liukunyu', 'wangxueke', 'xinxin']
                    mark_name = ['刘坤宇', '王雪珂', '信欣']
                    Feishu.bot(log_type, '快手关键词搜索', '快手关键词搜索cookie过期,请及时更换', mark_name)
                    time.sleep(10)
                    return video_list
            data_list = response.json()['data']['visionSearchPhoto']['feeds']
            for data in data_list:
                data = data['photo']
                photo_id = data["id"]
                status = sqlCollect.is_used(task_mark, photo_id, mark, channel_id)

                view_count = data.get("viewCount") or 0
                # Like counts may come back as strings such as "1.2万"; normalize them.
                like_count = cls.convert_to_number(data.get("likeCount") or 0)
                # Like/view ratio; guard against a zero view count.
                video_percent = '%.4f' % (int(like_count) / int(view_count)) if int(view_count) else '0'
                special = 0.015  # minimum like/view ratio
                old_title = data["caption"]  # original title
                duration = int(data["duration"]) / 1000  # milliseconds -> seconds
                video_url = data["photoUrl"]
                image_url = data["coverUrl"]
                log_data = f"user:{keyword},,video_id:{photo_id},,video_url:{video_url},,original_title:{old_title},,like_count:{like_count},,view_count:{view_count},,duration:{duration}"
                AliyunLogger.logging(channel_id, name, keyword, photo_id, "扫描到一条视频", "2001", log_data)
                if status:
                    AliyunLogger.logging(channel_id, name, keyword, photo_id, "该视频已改造过", "2001", log_data)
                    continue
                if int(view_count) < 1000:
                    AliyunLogger.logging(channel_id, name, keyword, photo_id, "不符合规则:浏览小于1000", "2003", log_data)
                    Common.logger("ks-key-word").info(
                        f"不符合规则:{task_mark},用户主页id:{keyword},视频id{photo_id},浏览:{view_count},时长:{int(duration)}")
                    continue
                if float(video_percent) < special:
                    AliyunLogger.logging(channel_id, name, keyword, photo_id, f"不符合规则:点赞/浏览{special}", "2003", log_data)
                    Common.logger("ks-key-word").info(
                        f"不符合规则:{task_mark},用户主页id:{keyword},视频id{photo_id},浏览:{view_count},时长:{int(duration)}")
                    continue
                if int(duration) < 30 or int(duration) > 600:
                    AliyunLogger.logging(channel_id, name, keyword, photo_id,
                                         "不符合规则:时长不符合规则大于600秒/小于30秒", "2003",
                                         log_data)
                    Common.logger("ks-key-word").info(
                        f"不符合规则:{task_mark},用户主页id:{keyword},视频id{photo_id},浏览:{view_count},时长:{int(duration)}")
                    continue
                AliyunLogger.logging(channel_id, name, keyword, photo_id, "符合规则等待改造", "2004", log_data)
                all_data = {"video_id": photo_id, "cover": image_url, "video_url": video_url,
                            "rule": '',
                            "old_title": old_title}
                video_list.append(all_data)
            return video_list
        except Exception as exc:
            Common.logger("ks-key-word").info(f"快手搜索词{keyword}获取失败{exc}\n")
            return video_list

    @classmethod
    def convert_to_number(cls, value):
        # Normalize counts that may be returned as strings such as "1.2万".
        if isinstance(value, str) and value.endswith("万"):
            return float(value[:-1]) * 10000  # strip the "万" suffix and multiply by 10,000
        return int(value)  # other formats are plain integers


if __name__ == '__main__':
    keyword = '毛主席故居'
    task_mark = '1'
    mark = 'pl-gjc'
    channel_id = '快手搜索'
    name = '1'
    task = {'combo': ['最新发布', '近1日', '1分钟内']}
    KsPcKeyword.get_key_word(keyword, task_mark, mark, channel_id, name, task)