# kuaishou.py
  1. import random
  2. import time
  3. import requests
  4. import json
  5. import urllib3
  6. from requests.adapters import HTTPAdapter
  7. from common import Feishu, Material, Common, AliyunLogger
  8. from common.sql_help import sqlCollect
  9. from data_channel.data_help import dataHelp
  10. class KS:
  11. @classmethod
  12. def get_share_count(cls, v_id):
  13. url = "http://8.217.192.46:8889/crawler/kuai_shou/detail"
  14. payload = json.dumps({
  15. "content_id": v_id
  16. })
  17. headers = {
  18. 'Content-Type': 'application/json'
  19. }
  20. for i in range(5):
  21. try:
  22. time.sleep(2)
  23. response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
  24. response = response.json()
  25. if response["code"] == 0:
  26. data = response["data"]["data"]
  27. share_count = data.get("share_count")
  28. return int(share_count)
  29. except KeyError as e:
  30. Common.logger("ks").info(f"获取分享数据失败:{e}\n")
  31. return 0
  32. @classmethod
  33. def get_ks_url(cls, task_mark, url_id, number, mark, feishu_id, cookie_sheet, channel_id, name):
  34. list = []
  35. pcursor = ""
  36. url = "https://www.kuaishou.com/graphql"
  37. for i in range(3):
  38. cookie = Material.get_cookie_data(feishu_id, cookie_sheet, channel_id)
  39. time.sleep(random.randint(1, 5))
  40. payload = json.dumps({
  41. "operationName": "visionProfilePhotoList",
  42. "variables": {
  43. "userId": url_id,
  44. "pcursor": pcursor,
  45. "page": "profile"
  46. },
  47. "query": "fragment photoContent on PhotoEntity {\n __typename\n id\n duration\n caption\n originCaption\n likeCount\n viewCount\n commentCount\n realLikeCount\n coverUrl\n photoUrl\n photoH265Url\n manifest\n manifestH265\n videoResource\n coverUrls {\n url\n __typename\n }\n timestamp\n expTag\n animatedCoverUrl\n distance\n videoRatio\n liked\n stereoType\n profileUserTopPhoto\n musicBlocked\n riskTagContent\n riskTagUrl\n}\n\nfragment recoPhotoFragment on recoPhotoEntity {\n __typename\n id\n duration\n caption\n originCaption\n likeCount\n viewCount\n commentCount\n realLikeCount\n coverUrl\n photoUrl\n photoH265Url\n manifest\n manifestH265\n videoResource\n coverUrls {\n url\n __typename\n }\n timestamp\n expTag\n animatedCoverUrl\n distance\n videoRatio\n liked\n stereoType\n profileUserTopPhoto\n musicBlocked\n riskTagContent\n riskTagUrl\n}\n\nfragment feedContent on Feed {\n type\n author {\n id\n name\n headerUrl\n following\n headerUrls {\n url\n __typename\n }\n __typename\n }\n photo {\n ...photoContent\n ...recoPhotoFragment\n __typename\n }\n canAddComment\n llsid\n status\n currentPcursor\n tags {\n type\n name\n __typename\n }\n __typename\n}\n\nquery visionProfilePhotoList($pcursor: String, $userId: String, $page: String, $webPageArea: String) {\n visionProfilePhotoList(pcursor: $pcursor, userId: $userId, page: $page, webPageArea: $webPageArea) {\n result\n llsid\n webPageArea\n feeds {\n ...feedContent\n __typename\n }\n hostName\n pcursor\n __typename\n }\n}\n"
  48. })
  49. headers = {
  50. 'accept': '*/*',
  51. 'content-type': 'application/json',
  52. 'Origin': 'https://www.kuaishou.com',
  53. 'Cookie': cookie,
  54. 'Accept-Language': 'zh-CN,zh;q=0.9',
  55. 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36',
  56. 'Referer': f'https://www.kuaishou.com/profile/{url_id}',
  57. 'Accept-Encoding': 'gzip, deflate, br',
  58. 'Connection': 'keep-alive'
  59. }
  60. urllib3.disable_warnings()
  61. s = requests.session()
  62. s.mount('http://', HTTPAdapter(max_retries=3))
  63. s.mount('https://', HTTPAdapter(max_retries=3))
  64. # response = requests.request("POST", url, headers=headers, data=payload, timeout=10)
  65. try:
  66. response = s.post(url=url, headers=headers, data=payload, verify=False, timeout=10)
  67. response.close()
  68. if response.status_code != 200:
  69. return list
  70. elif "visionProfilePhotoList" not in response.json()["data"]:
  71. if name == '快手品类账号':
  72. Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
  73. Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'抖音-{name}cookie过期,请及时更换', '刘兆恒')
  74. else:
  75. Feishu.bot(mark, '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', name)
  76. time.sleep(900)
  77. continue
  78. elif "feeds" not in response.json()["data"]["visionProfilePhotoList"]:
  79. if name == '快手品类账号':
  80. Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
  81. Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'抖音-{name}cookie过期,请及时更换', '刘兆恒')
  82. else:
  83. Feishu.bot(mark, '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', name)
  84. time.sleep(900)
  85. continue
  86. elif len(response.json()["data"]["visionProfilePhotoList"]["feeds"]) == 0:
  87. if name == '快手品类账号':
  88. Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
  89. Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'抖音-{name}cookie过期,请及时更换', '刘兆恒')
  90. else:
  91. Feishu.bot(mark, '机器自动改造消息通知', f'快手-{name}cookie使用频繁无法获取到数据,请及时更换', name)
  92. time.sleep(900)
  93. continue
  94. pcursor = response.json()['data']['visionProfilePhotoList']['pcursor']
  95. feeds = response.json()['data']['visionProfilePhotoList']['feeds']
  96. for i in range(len(feeds)):
  97. # try:
  98. # video_id = feeds[i].get("photo", {}).get("videoResource").get("h264", {}).get("videoId", "")
  99. # except KeyError:
  100. # video_id = feeds[i].get("photo", {}).get("videoResource").get("hevc", {}).get("videoId", "")
  101. # status = sqlCollect.is_used(task_mark, video_id, mark, channel_id)
  102. # if status:
  103. # continue
  104. video_id = feeds[i].get("photo", {}).get("id", "")
  105. status = sqlCollect.is_used(task_mark, video_id, mark, channel_id)
  106. share_count = cls.get_share_count(video_id)
  107. old_title = feeds[i].get("photo", {}).get("caption")
  108. cover_url = feeds[i].get('photo', {}).get('coverUrl', "")
  109. video_url = feeds[i].get('photo', {}).get('photoUrl', "")
  110. view_count = int(feeds[i].get('photo', {}).get('viewCount', 0))
  111. realLikeCount = int(feeds[i].get('photo', {}).get('realLikeCount', 0))
  112. video_percent = '%.4f' % (share_count / view_count)
  113. duration = dataHelp.video_duration(video_url)
  114. log_data = f"user:{url_id},,video_id:{video_id},,video_url:{video_url},,original_title:{old_title},,share_count:{share_count},,view_count:{view_count},,duration:{duration}"
  115. # log_data = f"user:{url_id},,video_id:{video_id},,video_url:{video_url},,original_title:{old_title},,view_count:{view_count},,duration:{duration}"
  116. AliyunLogger.logging(channel_id, name, url_id, video_id, "扫描到一条视频", "2001", log_data)
  117. Common.logger("ks").info(
  118. f"扫描:{task_mark},用户主页id:{url_id},视频id{video_id} ,播放数:{view_count} ,分享数:{share_count},时长:{duration} ")
  119. # if status:
  120. # AliyunLogger.logging(channel_id, name, url_id, video_id, "该视频已改造过", "2002", log_data)
  121. # continue
  122. special = float(0.001)
  123. if float(video_percent) < special:
  124. AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享/浏览小于0.001", "2003", log_data)
  125. Common.logger("ks").info(
  126. f"不符合规则:{task_mark},用户主页id:{url_id},视频id{video_id} ,播放数:{view_count} ,分享数:{share_count},时长:{duration} ")
  127. continue
  128. if share_count < 500:
  129. AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享小于500", "2003", log_data)
  130. Common.logger("ks").info(
  131. f"不符合规则:{task_mark},用户主页id:{url_id},视频id{video_id} ,播放数:{view_count} ,分享数:{share_count},时长:{duration} ")
  132. continue
  133. if duration < 30 or duration > 720:
  134. AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:时长不符合规则大于720秒/小于30秒", "2003", log_data)
  135. Common.logger("ks").info(
  136. f"不符合规则:{task_mark},用户主页id:{url_id},视频id{video_id} ,播放数:{view_count} ,分享数:{share_count},时长:{duration} ")
  137. continue
  138. all_data = {"video_id": video_id, "cover": cover_url, "video_url": video_url, "rule": video_percent, "old_title": old_title}
  139. list.append(all_data)
  140. AliyunLogger.logging(channel_id, name, url_id, video_id, "符合规则等待改造", "2004", log_data)
  141. if len(list) == int(number):
  142. Common.logger(mark).info(f"获取快手视频总数:{len(list)}\n")
  143. return list
  144. except Exception as exc:
  145. Common.logger("ks").warning(f"{name}的快手获取数据失败:{exc}\n")
  146. return list
  147. return list
# Ad-hoc manual smoke test: fetch the share count for one known video id.
# Performs a real network call; intended for interactive debugging only.
if __name__ == '__main__':
    KS.get_share_count("5188428384967044201")