kuaishou.py

import random
import time
import requests
import json
import urllib3
from requests.adapters import HTTPAdapter
from common import Feishu, Material, AliyunLogger
from common.sql_help import sqlCollect
from data_channel.data_help import dataHelp
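

# KS: Kuaishou profile crawler (description inferred from the code below).
# get_share_count() asks an internal detail service for a video's share count,
# and get_ks_url() pages through a creator's profile feed via the public
# GraphQL endpoint, keeping videos that pass the share/view-ratio,
# share-count and duration rules.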
class KS:
    @classmethod
    def get_share_count(cls, v_id):
        # Query the internal detail service for a single video's share count.
        url = "http://8.217.192.46:8889/crawler/kuai_shou/detail"
        payload = json.dumps({
            "content_id": v_id
        })
        headers = {
            'Content-Type': 'application/json'
        }
        # Retry up to 5 times; fall back to 0 if the count cannot be fetched.
        for i in range(5):
            try:
                time.sleep(2)
                response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
                response = response.json()
                if response["code"] == 0:
                    data = response["data"]["data"]
                    share_count = data.get("share_count")
                    if share_count is not None:
                        return int(share_count)
            except (KeyError, ValueError, requests.RequestException):
                continue
        return 0
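
    # Note (inferred from the method body below): get_ks_url pulls up to
    # `number` candidate videos from the profile of `url_id`, using a cookie
    # read from Feishu (feishu_id / cookie_sheet). Every scanned video is
    # logged to AliyunLogger; Feishu.bot is pinged when the cookie looks
    # expired or rate-limited. Pagination is driven by the GraphQL `pcursor`.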
    @classmethod
    def get_ks_url(cls, task_mark, url_id, number, mark, feishu_id, cookie_sheet, channel_id, name):
        video_list = []
        pcursor = ""
        url = "https://www.kuaishou.com/graphql"
        # Up to 3 pages of the profile feed; pcursor carries the pagination state.
        for i in range(3):
            cookie = Material.get_cookie_data(feishu_id, cookie_sheet, channel_id)
            time.sleep(random.randint(1, 5))
            payload = json.dumps({
                "operationName": "visionProfilePhotoList",
                "variables": {
                    "userId": url_id,
                    "pcursor": pcursor,
                    "page": "profile"
                },
                "query": "fragment photoContent on PhotoEntity {\n __typename\n id\n duration\n caption\n originCaption\n likeCount\n viewCount\n commentCount\n realLikeCount\n coverUrl\n photoUrl\n photoH265Url\n manifest\n manifestH265\n videoResource\n coverUrls {\n url\n __typename\n }\n timestamp\n expTag\n animatedCoverUrl\n distance\n videoRatio\n liked\n stereoType\n profileUserTopPhoto\n musicBlocked\n riskTagContent\n riskTagUrl\n}\n\nfragment recoPhotoFragment on recoPhotoEntity {\n __typename\n id\n duration\n caption\n originCaption\n likeCount\n viewCount\n commentCount\n realLikeCount\n coverUrl\n photoUrl\n photoH265Url\n manifest\n manifestH265\n videoResource\n coverUrls {\n url\n __typename\n }\n timestamp\n expTag\n animatedCoverUrl\n distance\n videoRatio\n liked\n stereoType\n profileUserTopPhoto\n musicBlocked\n riskTagContent\n riskTagUrl\n}\n\nfragment feedContent on Feed {\n type\n author {\n id\n name\n headerUrl\n following\n headerUrls {\n url\n __typename\n }\n __typename\n }\n photo {\n ...photoContent\n ...recoPhotoFragment\n __typename\n }\n canAddComment\n llsid\n status\n currentPcursor\n tags {\n type\n name\n __typename\n }\n __typename\n}\n\nquery visionProfilePhotoList($pcursor: String, $userId: String, $page: String, $webPageArea: String) {\n visionProfilePhotoList(pcursor: $pcursor, userId: $userId, page: $page, webPageArea: $webPageArea) {\n result\n llsid\n webPageArea\n feeds {\n ...feedContent\n __typename\n }\n hostName\n pcursor\n __typename\n }\n}\n"
            })
            headers = {
                'accept': '*/*',
                'content-type': 'application/json',
                'Origin': 'https://www.kuaishou.com',
                'Cookie': cookie,
                'Accept-Language': 'zh-CN,zh;q=0.9',
                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36',
                'Referer': f'https://www.kuaishou.com/profile/{url_id}',
                'Accept-Encoding': 'gzip, deflate, br',
                'Connection': 'keep-alive'
            }
            urllib3.disable_warnings()
            # Session with transport-level retries on both schemes.
            s = requests.session()
            s.mount('http://', HTTPAdapter(max_retries=3))
            s.mount('https://', HTTPAdapter(max_retries=3))
            # response = requests.request("POST", url, headers=headers, data=payload, timeout=10)
            try:
                response = s.post(url=url, headers=headers, data=payload, verify=False, timeout=10)
                response.close()
                if response.status_code != 200:
                    return video_list
                # "error_msg" in the raw body usually means the cookie has expired.
                # Bot messages below: "机器自动改造消息通知" = "automated repurposing notification",
                # "cookie过期,请及时更换" = "cookie expired, please replace it promptly".
                if "error_msg" in response.text:
                    Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
                    Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', '刘兆恒')
                    return video_list
                data = response.json()["data"]
                if "visionProfilePhotoList" not in data:
                    if name == '快手品类账号':
                        Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
                        Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', '刘兆恒')
                    else:
                        Feishu.bot(mark, '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', name)
                    return video_list
                photo_list = data["visionProfilePhotoList"]
                if "feeds" not in photo_list:
                    if name == '快手品类账号':
                        Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
                        Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', '刘兆恒')
                    else:
                        Feishu.bot(mark, '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', name)
                    return video_list
                if len(photo_list["feeds"]) == 0:
                    if name == '快手品类账号':
                        Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
                        Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', '刘兆恒')
                    else:
                        Feishu.bot(mark, '机器自动改造消息通知', f'快手-{name}cookie使用频繁无法获取到数据,请及时更换', name)
                    return video_list
                pcursor = photo_list['pcursor']
                feeds = photo_list['feeds']
                for feed in feeds:
                    # Previous approach kept for reference: videoId was read from
                    # videoResource (h264 / hevc) and checked against the dedup table.
                    # try:
                    #     video_id = feed.get("photo", {}).get("videoResource").get("h264", {}).get("videoId", "")
                    # except KeyError:
                    #     video_id = feed.get("photo", {}).get("videoResource").get("hevc", {}).get("videoId", "")
                    # status = sqlCollect.is_used(task_mark, video_id, mark, channel_id)
                    # if status:
                    #     continue
                    video_id = feed.get("photo", {}).get("id", "")
                    status = sqlCollect.is_used(task_mark, video_id, mark, channel_id)
                    old_title = feed.get("photo", {}).get("caption")
                    cover_url = feed.get('photo', {}).get('coverUrl', "")
                    video_url = feed.get('photo', {}).get('photoUrl', "")
                    view_count = int(feed.get('photo', {}).get('viewCount', 0))
                    realLikeCount = int(feed.get('photo', {}).get('realLikeCount', 0))
                    # Fetch the share count first: it feeds both the ratio and log_data below.
                    share_count = cls.get_share_count(video_id)
                    # Guard against division by zero when viewCount is missing or 0.
                    video_percent = '%.4f' % (share_count / view_count) if view_count else '0'
                    duration = dataHelp.video_duration(video_url)
                    log_data = f"user:{url_id},,video_id:{video_id},,video_url:{video_url},,original_title:{old_title},,share_count:{share_count},,view_count:{view_count},,duration:{duration}"
                    # "扫描到一条视频" = "scanned one video"
                    AliyunLogger.logging(channel_id, name, url_id, video_id, "扫描到一条视频", "2001", log_data)
                    # if status:
                    #     AliyunLogger.logging(channel_id, name, url_id, video_id, "该视频已改造过", "2002", log_data)
                    #     continue
                    # Filter rules. Log messages: "不符合规则" = "does not meet rule",
                    # "符合规则等待改造" = "meets rules, queued for repurposing".
                    special = float(0.001)
                    if float(video_percent) < special:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享/浏览小于0.001", "2003", log_data)
                        continue
                    if share_count < 500:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享小于500", "2003", log_data)
                        continue
                    if duration < 30 or duration > 720:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:时长大于720秒/小于30秒", "2003", log_data)
                        continue
                    all_data = {"video_id": video_id, "cover": cover_url, "video_url": video_url, "rule": video_percent, "old_title": old_title}
                    video_list.append(all_data)
                    AliyunLogger.logging(channel_id, name, url_id, video_id, "符合规则等待改造", "2004", log_data)
                    if len(video_list) == int(number):
                        return video_list
            except Exception:
                # Feishu.bot("wangxueke", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', 'wangxueke')
                # Feishu.bot("liuzhaoheng", '机器自动改造消息通知', f'快手-{name}cookie过期,请及时更换', '刘兆恒')
                return video_list
        return video_list
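

# Minimal sketch of the selection rule used in get_ks_url, pulled out as a pure
# function so it can be exercised without any network or cookie setup. The
# name `passes_transform_rules` is illustrative only and not part of this
# codebase; the thresholds mirror the checks above (share/view >= 0.001,
# share_count >= 500, 30s <= duration <= 720s). Note the method above rounds
# the ratio to 4 decimals before comparing, which this sketch skips.
def passes_transform_rules(share_count: int, view_count: int, duration: float) -> bool:
    if view_count <= 0:
        return False
    if share_count / view_count < 0.001:
        return False
    if share_count < 500:
        return False
    if duration < 30 or duration > 720:
        return False
    return True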


if __name__ == '__main__':
    KS.get_ks_url("1", "3xzicxg2nandemc", 1, "1", 'WuoQsVFXChVMK4tDHqLcwLWgnjh', 'Sjk8p8', '', '')