# kuaishou.py
  1. import random
  2. import time
  3. import requests
  4. import json
  5. import urllib3
  6. from requests.adapters import HTTPAdapter
  7. from common import Feishu, Material, AliyunLogger
  8. from common.sql_help import sqlCollect
  9. from data_channel.data_help import dataHelp
  10. class KS:
  11. @classmethod
  12. def get_share_count(cls, v_id):
  13. url = "http://8.217.192.46:8889/crawler/kuai_shou/detail"
  14. payload = json.dumps({
  15. "content_id": v_id
  16. })
  17. headers = {
  18. 'Content-Type': 'application/json'
  19. }
  20. try:
  21. time.sleep(random.uniform(1, 10))
  22. response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
  23. response = response.json()
  24. if response["code"] == 0:
  25. data = response["data"]["data"]
  26. share_count = data.get("share_count")
  27. return int(share_count)
  28. except KeyError as e:
  29. return 0
  30. @classmethod
  31. def get_ks_url(cls, task_mark, url_id, number, mark, feishu_id, cookie_sheet, channel_id, name):
  32. list = []
  33. pcursor = ""
  34. url = "https://www.kuaishou.com/graphql"
  35. if not url_id:
  36. return
  37. for i in range(3):
  38. cookie = Material.get_cookie_data(feishu_id, cookie_sheet, channel_id)
  39. time.sleep(random.randint(1, 5))
  40. payload = json.dumps({
  41. "operationName": "visionProfilePhotoList",
  42. "variables": {
  43. "userId": url_id,
  44. "pcursor": pcursor,
  45. "page": "profile"
  46. },
  47. "query": "fragment photoContent on PhotoEntity {\n __typename\n id\n duration\n caption\n originCaption\n likeCount\n viewCount\n commentCount\n realLikeCount\n coverUrl\n photoUrl\n photoH265Url\n manifest\n manifestH265\n videoResource\n coverUrls {\n url\n __typename\n }\n timestamp\n expTag\n animatedCoverUrl\n distance\n videoRatio\n liked\n stereoType\n profileUserTopPhoto\n musicBlocked\n riskTagContent\n riskTagUrl\n}\n\nfragment recoPhotoFragment on recoPhotoEntity {\n __typename\n id\n duration\n caption\n originCaption\n likeCount\n viewCount\n commentCount\n realLikeCount\n coverUrl\n photoUrl\n photoH265Url\n manifest\n manifestH265\n videoResource\n coverUrls {\n url\n __typename\n }\n timestamp\n expTag\n animatedCoverUrl\n distance\n videoRatio\n liked\n stereoType\n profileUserTopPhoto\n musicBlocked\n riskTagContent\n riskTagUrl\n}\n\nfragment feedContent on Feed {\n type\n author {\n id\n name\n headerUrl\n following\n headerUrls {\n url\n __typename\n }\n __typename\n }\n photo {\n ...photoContent\n ...recoPhotoFragment\n __typename\n }\n canAddComment\n llsid\n status\n currentPcursor\n tags {\n type\n name\n __typename\n }\n __typename\n}\n\nquery visionProfilePhotoList($pcursor: String, $userId: String, $page: String, $webPageArea: String) {\n visionProfilePhotoList(pcursor: $pcursor, userId: $userId, page: $page, webPageArea: $webPageArea) {\n result\n llsid\n webPageArea\n feeds {\n ...feedContent\n __typename\n }\n hostName\n pcursor\n __typename\n }\n}\n"
  48. })
  49. headers = {
  50. 'accept': '*/*',
  51. 'content-type': 'application/json',
  52. 'Origin': 'https://www.kuaishou.com',
  53. 'Cookie': cookie,
  54. 'Accept-Language': 'zh-CN,zh;q=0.9',
  55. 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36',
  56. 'Referer': f'https://www.kuaishou.com/profile/{url_id}',
  57. 'Accept-Encoding': 'gzip, deflate, br',
  58. 'Connection': 'keep-alive'
  59. }
  60. urllib3.disable_warnings()
  61. s = requests.session()
  62. s.mount('http://', HTTPAdapter(max_retries=3))
  63. s.mount('https://', HTTPAdapter(max_retries=3))
  64. # response = requests.request("POST", url, headers=headers, data=payload, timeout=10)
  65. try:
  66. response = s.post(url=url, headers=headers, data=payload, verify=False, timeout=10)
  67. response.close()
  68. if response.status_code != 200:
  69. return list
  70. elif response.status_code == 200 and "error_msg" in response.text:
  71. Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
  72. Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', '刘兆恒')
  73. time.sleep(600)
  74. return list
  75. elif "visionProfilePhotoList" not in response.json()["data"]:
  76. if name == '快手品类账号':
  77. Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
  78. Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', '刘兆恒')
  79. time.sleep(600)
  80. else:
  81. Feishu.bot(mark, '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', name)
  82. return list
  83. elif "feeds" not in response.json()["data"]["visionProfilePhotoList"]:
  84. if name == '快手品类账号':
  85. Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
  86. Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', '刘兆恒')
  87. time.sleep(600)
  88. else:
  89. Feishu.bot(mark, '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', name)
  90. return list
  91. elif len(response.json()["data"]["visionProfilePhotoList"]["feeds"]) == 0:
  92. if name == '快手品类账号':
  93. Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
  94. Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', '刘兆恒')
  95. time.sleep(600)
  96. else:
  97. Feishu.bot(mark, '机器自动改造消息通知', f'快手-{name}cookie使用频繁无法获取到数据,请及时更换', name)
  98. return list
  99. pcursor = response.json()['data']['visionProfilePhotoList']['pcursor']
  100. feeds = response.json()['data']['visionProfilePhotoList']['feeds']
  101. for i in range(len(feeds)):
  102. # try:
  103. # video_id = feeds[i].get("photo", {}).get("videoResource").get("h264", {}).get("videoId", "")
  104. # except KeyError:
  105. # video_id = feeds[i].get("photo", {}).get("videoResource").get("hevc", {}).get("videoId", "")
  106. # status = sqlCollect.is_used(task_mark, video_id, mark, channel_id)
  107. # if status:
  108. # continue
  109. video_id = feeds[i].get("photo", {}).get("id", "")
  110. status = sqlCollect.is_used(task_mark, video_id, mark, channel_id)
  111. old_title = feeds[i].get("photo", {}).get("caption")
  112. cover_url = feeds[i].get('photo', {}).get('coverUrl', "")
  113. video_url = feeds[i].get('photo', {}).get('photoUrl', "")
  114. view_count = int(feeds[i].get('photo', {}).get('viewCount', 0))
  115. realLikeCount = int(feeds[i].get('photo', {}).get('realLikeCount', 0))
  116. video_percent = '%.4f' % (share_count / view_count)
  117. duration = dataHelp.video_duration(video_url)
  118. log_data = f"user:{url_id},,video_id:{video_id},,video_url:{video_url},,original_title:{old_title},,share_count:{share_count},,view_count:{view_count},,duration:{duration}"
  119. # log_data = f"user:{url_id},,video_id:{video_id},,video_url:{video_url},,original_title:{old_title},,view_count:{view_count},,duration:{duration}"
  120. AliyunLogger.logging(channel_id, name, url_id, video_id, "扫描到一条视频", "2001", log_data)
  121. # if status:
  122. # AliyunLogger.logging(channel_id, name, url_id, video_id, "该视频已改造过", "2002", log_data)
  123. # continue
  124. share_count = cls.get_share_count(video_id)
  125. special = float(0.001)
  126. if float(video_percent) < special:
  127. AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享/浏览小于0.001", "2003", log_data)
  128. continue
  129. if share_count < 500:
  130. AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享小于500", "2003", log_data)
  131. continue
  132. if duration < 30 or duration > 720:
  133. AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:时长不符合规则大于720秒/小于30秒", "2003", log_data)
  134. continue
  135. all_data = {"video_id": video_id, "cover": cover_url, "video_url": video_url, "rule": video_percent, "old_title": old_title}
  136. list.append(all_data)
  137. AliyunLogger.logging(channel_id, name, url_id, video_id, "符合规则等待改造", "2004", log_data)
  138. if len(list) == int(number):
  139. return list
  140. except Exception as exc:
  141. # Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
  142. # Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', '刘兆恒')
  143. return list
  144. return list
  145. if __name__ == '__main__':
  146. KS.get_ks_url("1","3xzicxg2nandemc",1,"1",'WuoQsVFXChVMK4tDHqLcwLWgnjh','Sjk8p8','','')