# sph_nrxs.py
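"""
Crawler for WeChat Channels (视频号) heating reflux accounts.

For each account returned by sqlCollect.get_machine_making_reflux, the
Channels heating (视频号加热) recommendation service is queried for related
up-masters, their home pages are fetched through the FinderGetUpMasterNextPage
service, video download addresses are resolved, and qualifying records
(uid, nickname, likes, shares, duration, video URL) are appended to a Feishu sheet.
"""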

import random
import time
import requests
import json
import re
from common import Material, Feishu, Common
from common.sql_help import sqlCollect


class SphNrxs:
    @classmethod
    def get_sph_data(cls, user, nick_name, uid):
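        """Fetch up-master home-page videos for `user` and append qualifying
        records (uid, nickname, likes, shares, duration, URL) to the Feishu sheet."""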
        url = "http://61.48.133.26:30001/FinderGetUpMasterNextPage"
        count = 1  # Only the first page is fetched; count is kept for logging.
        headers = {
            'Content-Type': 'application/json'
        }
        payload = json.dumps({
            "username": user,
            "last_buffer": ""
        })
        response = requests.request("POST", url, headers=headers, data=payload)
        time.sleep(random.randint(1, 5))
        Common.logger("sph_nrxs").info(f"{user}: fetching video page {count}")
        count += 1
        if not response.text:
            return
        res_json = response.json()
        try:
            # Bail out if the response carries an empty download-address list.
            if not res_json["DownloadAddress"]:
                return
        except (KeyError, TypeError):
            pass
        if "objectId" not in response.text or response.status_code != 200:
            return
        # Nothing to do if the home-page list is missing or empty.
        if not res_json.get("UpMasterHomePage"):
            return
        try:
            for obj in res_json["UpMasterHomePage"]:
                Common.logger("sph_crawling").info(f"{user}: scanned one record")
                objectId = obj['objectId']
                # Skip videos that have already been collected.
                object_id = sqlCollect.sph_data_info_v_id(objectId, "视频号")
                if object_id:
                    continue
                objectNonceId = obj['objectNonceId']
                url1 = "http://61.48.133.26:30001/GetFinderDownloadAddress"
                payload = json.dumps({
                    "objectId": objectId,
                    "objectNonceId": objectNonceId
                })
                headers = {
                    'Content-Type': 'text/plain'
                }
                response = requests.request("POST", url1, headers=headers, data=payload)
                time.sleep(random.randint(0, 1))
                video_obj = response.json()
                video_url = video_obj.get('DownloadAddress')
                if not video_url:
                    continue
                duration = video_obj.get('play_len')
                # cover = video_obj.get('thumb_url')
                share_cnt = int(obj['forward_count'])  # share count
                like_cnt = int(obj['like_count'])  # like count
                # user_name = obj['username']  # user identifier
                nick_name = obj['nickname']  # nickname
                # comment_count = obj['comment_count']  # comment count
                # fav_count = obj['fav_count']  # thumbs-up count
                values = [
                    [
                        uid,
                        nick_name,
                        like_cnt,
                        share_cnt,
                        duration,
                        video_url
                    ]
                ]
                # Prepend a blank row to the Feishu sheet, then write the record into A2:Z2.
                Feishu.insert_columns("UBvisMdE7hkI6rtIfzycCtdsnWM", '3476ab', "ROWS", 1, 2)
                time.sleep(0.5)
                Feishu.update_values("UBvisMdE7hkI6rtIfzycCtdsnWM", '3476ab', "A2:Z2", values)
                Common.logger("sph_nrxs").info(f"{nick_name} matches the rules")
        except Exception as e:
            Common.logger("sph_nrxs").info(f"{user} exception, details: {e}")
            return

    @classmethod
    def get_nrxs_list(cls, uid):
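        """Query the Channels heating (视频号加热) recommendation service for
        accounts related to `uid`, crawl each one via get_sph_data, and return
        the list of {nick_name, user_name} dicts."""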
        nrxs_list = []
        cookie = Material.get_cookie_data("KsoMsyP2ghleM9tzBfmcEEXBnXg", "U1gySe", "视频号加热")
        url = "http://8.217.190.241:8888/crawler/wei_xin/shi_pin_hao/jia_re"
        payload = json.dumps({
            "account_name": uid,
            "category": "推荐",
            "cookie": cookie
        })
        headers = {
            'Content-Type': 'application/json'
        }
        try:
            time.sleep(2)
            response = requests.request("POST", url, headers=headers, data=payload)
            response = response.json()
            code = response['code']
            if code == 0:
                data_list = response['data']['data']
                if data_list:
                    for data in data_list:
                        nick_name = data['nickName']  # nickname
                        user_name = data['username']  # username (v2 id)
                        data_dict = {"nick_name": nick_name, "user_name": user_name}
                        cls.get_sph_data(user_name, nick_name, uid)
                        nrxs_list.append(data_dict)
                    return nrxs_list
            else:
                Feishu.bot("xinxin", 'Channels heating alert', 'The cookie has expired, please replace it promptly', 'xinxin')
                return None
        except Exception as e:
            Feishu.bot("xinxin", 'Channels heating alert', 'The cookie has expired, please replace it promptly', 'xinxin')
            Common.logger("feishu").error(f"Channels heating bot exception: {e}\n")
            return None

    @classmethod
    def sph_nrxs_data(cls):
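        """Entry point: pull reflux accounts from the database and crawl each
        uid that is not a plain alphanumeric string."""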
        users = sqlCollect.get_machine_making_reflux("视频号", "单点视频")
        if users is None:
            return
        users = [item[0] for item in users]
        for uid in users:
            if re.match(r'^[A-Za-z0-9]+$', uid):
                # Plain alphanumeric uid matched: skip it and move on.
                continue
            nrxs_list = cls.get_nrxs_list(uid)
            print(nrxs_list)


if __name__ == '__main__':
    SphNrxs.sph_nrxs_data()