# sph_jr_nrxs.py
  1. import random
  2. import time
  3. from datetime import datetime
  4. import requests
  5. import json
  6. import re
  7. from common import Material, Feishu
  8. from common.sql_help import sqlCollect
class SphNrxs:
    """Create Piaoquan (票圈) accounts by tracing similar WeChat-Channels creators."""
  11. @classmethod
  12. def insert_number(cls, mid, tag_id):
  13. for i in range(3):
  14. url = "https://admin.piaoquantv.com/manager/crawler/v3/user/save"
  15. payload = {
  16. "source": "jiqizidonggaizao",
  17. "mode": "author",
  18. "modeValue": "",
  19. "modeBoard": "",
  20. "recomStatus": -7,
  21. "appRecomStatus": -7,
  22. "autoAuditStatus": 0,
  23. "tag": f"7592,452,8776,{tag_id}",
  24. "contentCategory": 0,
  25. "link": str(mid)
  26. }
  27. cookie = Material.get_cookie_data("KsoMsyP2ghleM9tzBfmcEEXBnXg", "U1gySe", "票圈后台-cookie")
  28. headers = {
  29. 'content-length': '0',
  30. 'cookie': cookie,
  31. 'origin': 'https://admin.piaoquantv.com',
  32. 'priority': 'u=1, i',
  33. 'sec-ch-ua': '"Not/A)Brand";v="8", "Chromium";v="126", "Google Chrome";v="126"',
  34. 'sec-ch-ua-mobile': '?0',
  35. 'sec-ch-ua-platform': '"macOS"'
  36. }
  37. response = requests.request("POST", url, headers=headers, json=payload)
  38. response = response.json()
  39. code = response["code"]
  40. if code == 0:
  41. print("添加账号成功")
  42. time.sleep(1)
  43. url = "https://admin.piaoquantv.com/manager/crawler/v3/user/list"
  44. payload = {
  45. "pageNum": 1,
  46. "pageSize": 20
  47. }
  48. response = requests.request("POST", url, headers=headers, json=payload)
  49. response = response.json()
  50. list = response["content"]['list']
  51. link = list[0]["link"]
  52. if link == str(mid):
  53. print("获取站内账号ID成功")
  54. return list[0]["uid"]
  55. else:
  56. if code == '10010':
  57. return None
  58. Feishu.bot("xinxin", '票圈后台提醒', f'票圈后台cookie 失效了,请及时更换', 'xinxin')
  59. """腾讯互选平台通过appid获取观众画像"""
  60. @classmethod
  61. def get_hx(cls, aid):
  62. url = "http://8.217.190.241:8888/crawler/wei_xin/shi_pin_hao/hu_xuan_detail"
  63. cookie = Material.get_cookie_data("KsoMsyP2ghleM9tzBfmcEEXBnXg", "U1gySe", "腾讯互选平台-cookie")
  64. account_id = Material.get_cookie_data("KsoMsyP2ghleM9tzBfmcEEXBnXg", "U1gySe", "腾讯互选平台-account_id")
  65. payload = json.dumps({
  66. "account_id": aid,
  67. "uid": str(account_id),
  68. "cookie": cookie
  69. })
  70. headers = {
  71. 'Content-Type': 'application/json'
  72. }
  73. response = requests.request("POST", url, headers=headers, data=payload)
  74. response = response.json()
  75. ret = response['code']
  76. if ret == 0:
  77. data = response['data']['data']
  78. if data:
  79. age_ranges = ['<18 岁', '18 ~ 24 岁', '25 ~ 29 岁', '30 ~ 39 岁', '40 ~ 49 岁', '>50 岁']
  80. viewer_portrait = data['viewer_portrait'] # 观众画像
  81. # 找到占比最大的项
  82. viewer_max_age_range = max(
  83. (item for item in viewer_portrait if item['name'] in age_ranges),
  84. key=lambda x: float(x['percentage'].strip('%'))
  85. )
  86. if viewer_max_age_range['name'] != '>50 岁':
  87. return "2"
  88. fans_portrait = data['fans_portrait'] # 粉丝画像
  89. # 找到占比最大的项
  90. fans_max_age_range = max(
  91. (item for item in fans_portrait if item['name'] in age_ranges),
  92. key=lambda x: float(x['percentage'].strip('%'))
  93. )
  94. if fans_max_age_range['name'] != '>50 岁':
  95. return "3"
  96. return "0"
  97. else:
  98. Feishu.bot("xinxin", '腾讯互选平台提醒', f'腾讯互选平台cookie 失效了,请及时更换', 'xinxin')
  99. return None
  100. """腾讯互选平台通过搜索获取appid"""
  101. @classmethod
  102. def get_hxpt_appid(cls, user):
  103. url = "https://huxuan.qq.com/cgi-bin/advertiser/finder_publisher/search"
  104. cookie = Material.get_cookie_data("KsoMsyP2ghleM9tzBfmcEEXBnXg", "U1gySe", "腾讯互选平台-cookie")
  105. account_id = Material.get_cookie_data("KsoMsyP2ghleM9tzBfmcEEXBnXg", "U1gySe", "腾讯互选平台-account_id")
  106. payload = json.dumps({
  107. "keyword": user,
  108. "page": {
  109. "no": 1,
  110. "size": 50
  111. }
  112. })
  113. headers = {
  114. 'Accept': 'application/json, text/plain, */*',
  115. 'Accept-Language': 'zh-CN,zh;q=0.9',
  116. 'Cache-Control': 'no-cache',
  117. 'Connection': 'keep-alive',
  118. 'Content-Type': 'application/json',
  119. 'Cookie': cookie,
  120. 'Origin': 'https://huxuan.qq.com',
  121. 'Pragma': 'no-cache',
  122. 'Referer': 'https://huxuan.qq.com/trade/selection/46251713/selection_list?type=finder-trade',
  123. 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36',
  124. 'account_id': str(account_id),
  125. 'sec-ch-ua': '"Not)A;Brand";v="99", "Google Chrome";v="127", "Chromium";v="127"',
  126. 'sec-ch-ua-mobile': '?0',
  127. 'sec-ch-ua-platform': '"macOS"'
  128. }
  129. for i in range(3):
  130. response = requests.request("POST", url, headers=headers, data=payload)
  131. response = response.json()
  132. ret = response['ret']
  133. if ret == 0:
  134. try:
  135. appid = response['data']['item'][0]['appid']
  136. if appid:
  137. return appid
  138. except Exception as e:
  139. continue
  140. else:
  141. Feishu.bot("xinxin", '腾讯互选平台提醒', f'腾讯互选平台cookie 失效了,请及时更换', 'xinxin')
  142. return None
  143. return None
  144. """获取用户主页是否符合规则"""
  145. @classmethod
  146. def get_sph_data(cls, user, uid):
  147. url = "http://61.48.133.26:30001/FinderGetUpMasterNextPage"
  148. headers = {
  149. 'Content-Type': 'application/json'
  150. }
  151. payload = json.dumps({
  152. "username": user,
  153. "last_buffer": ""
  154. })
  155. response = requests.request("POST", url, headers=headers, data=payload)
  156. time.sleep(random.randint(1, 5))
  157. if response.text == "" or response.text == None:
  158. return
  159. res_json = response.json()
  160. try:
  161. if len(res_json["DownloadAddress"]) == 0 or res_json["DownloadAddress"] == "" or res_json[
  162. "DownloadAddress"] == None:
  163. return
  164. except:
  165. pass
  166. if "objectId" not in response.text or response.status_code != 200:
  167. return
  168. if len(res_json["UpMasterHomePage"]) == 0:
  169. return
  170. if not res_json["UpMasterHomePage"]:
  171. return
  172. try:
  173. for obj in res_json["UpMasterHomePage"]:
  174. objectId = obj['objectId']
  175. object_id = sqlCollect.sph_data_info_v_id(objectId, "视频号")
  176. if object_id:
  177. continue
  178. objectNonceId = obj['objectNonceId']
  179. url1 = "http://61.48.133.26:30001/GetFinderDownloadAddress"
  180. payload = json.dumps({
  181. "objectId": objectId,
  182. "objectNonceId": objectNonceId
  183. })
  184. headers = {
  185. 'Content-Type': 'text/plain'
  186. }
  187. response = requests.request("POST", url1, headers=headers, data=payload)
  188. time.sleep(random.randint(0, 1))
  189. video_obj = response.json()
  190. video_url = video_obj.get('DownloadAddress')
  191. if len(video_url) == 0:
  192. continue
  193. duration = video_obj.get('play_len')
  194. # cover = video_obj.get('thumb_url')
  195. share_cnt = int(obj['forward_count']) # 分享
  196. like_cnt = int(obj['like_count']) # 点赞
  197. # user_name = obj['username'] # 用户名标示
  198. nick_name = obj['nickname'] # 用户名
  199. # comment_count = obj['comment_count'] # 评论数
  200. # fav_count = obj['fav_count'] # 大拇指点赞数
  201. video_percent = '%.2f' % (share_cnt / like_cnt)
  202. special = float(0.25)
  203. if share_cnt >= 300 and float(video_percent) >= special and int(duration) >= 30:
  204. return nick_name
  205. return None
  206. except Exception as e:
  207. return None
    """视频号加热平台相似溯源"""
    @classmethod
    def get_nrxs_list(cls, uid):
        """Query the WeChat-Channels boost ("加热") platform for creators similar
        to *uid* and record any qualifying hit.

        Side effects: may create a Piaoquan crawler user (insert_number),
        updates reflux state via sqlCollect, inserts tracing rows, writes a row
        to a Feishu sheet, and sends Feishu bot notifications.

        Always returns None (the return value is only used to end processing).
        """
        cookie = Material.get_cookie_data("KsoMsyP2ghleM9tzBfmcEEXBnXg", "U1gySe", "视频号加热")
        time.sleep(10)
        url = "http://8.217.190.241:8888/crawler/wei_xin/shi_pin_hao/jia_re"
        payload = json.dumps({
            "account_name": uid,
            "category": "推荐",
            "cookie": cookie
        })
        headers = {
            'Content-Type': 'application/json'
        }
        try:
            # time.sleep(2)
            response = requests.request("POST", url, headers=headers, data=payload)
            response = response.json()
            code = response['code']
            if code == 0:
                # code 0 with null data: nothing similar found — mark as processed.
                if response['data'] == None:
                    sqlCollect.update_machine_making_reflux(uid)
                    return
                # Reuse an existing crawler user if one is already registered.
                status = sqlCollect.select_crawler_uesr_v3(uid)
                if status:
                    # status comes back as a DB tuple string; strip "(),".
                    pq_id = re.sub(r'[(),]', '', str(status))
                else:
                    pq_id = cls.insert_number(uid, '467')
                    if pq_id == None:
                        return
                sqlCollect.update_machine_making_reflux(uid)
                data_list = response['data']['data']
                if data_list:
                    for data in data_list:
                        nick_name = data['nickName']  # display name
                        user_name = data['username']  # v2 username id
                        # Check whether the candidate's home page passes the rule.
                        user = cls.get_sph_data(user_name, uid)
                        if user:
                            # time.sleep(180)
                            # appid = cls.get_hxpt_appid(user)
                            # if appid:
                            #     time.sleep(180)
                            #     has_used = cls.get_hx(appid)
                            #     if has_used:
                            #         if has_used == '0':
                            res = sqlCollect.insert_xssy_sph_info(uid, nick_name, "视频号", user_name, "0", "", pq_id)
                            if res == 1:
                                current_time = datetime.now()
                                formatted_time = current_time.strftime("%Y-%m-%d %H:%M:%S")
                                values = [
                                    [
                                        "视频号",
                                        nick_name,
                                        str(pq_id),
                                        "5",
                                        "通用-分享到群",
                                        "AI片尾引导",
                                        "zhifeng_emo,sijia",
                                        "",
                                        "",
                                        "AI标题",
                                        "",
                                        f"溯源账号:{uid}",
                                        formatted_time
                                    ]
                                ]
                                # Append the hit to the Feishu tracking sheet.
                                Feishu.insert_columns("WGIYsSDdxhItBwtJ0xgc0yE7nEg", '0701bd', "ROWS", 1, 2)
                                time.sleep(0.5)
                                Feishu.update_values("WGIYsSDdxhItBwtJ0xgc0yE7nEg", '0701bd', "B2:Z2",
                                                     values)
                                Feishu.bot("xinxin", '视频号溯源成功提示', f'原账号:{uid},溯源到的账号:{nick_name},写入账号:{pq_id}', 'xinxin')
                        else:
                            # Record the candidate as not qualifying.
                            # sqlCollect.insert_xssy_sph_info(uid, nick_name, user_name, has_used, appid)
                            sqlCollect.insert_xssy_sph_info(uid, nick_name, user_name, "1")
                            # else:
                            #     sqlCollect.insert_xssy_sph_info(uid, nick_name, user_name, "1")
                            #     continue
                else:
                    return None
            else:
                Feishu.bot("xinxin", '视频号加热提醒', f'视频号加热平台 cookie 失效了,请及时更换', 'xinxin')
                return None
        except Exception as e:
            # NOTE(review): ANY exception is reported as an expired cookie;
            # this may mask unrelated failures (network, DB, KeyError).
            Feishu.bot("xinxin", '视频号加热提醒', f'视频号加热平台 cookie 失效了,请及时更换', 'xinxin')
            return None
  293. """获取需溯源账号"""
  294. @classmethod
  295. def sph_nrxs_data(cls):
  296. user = sqlCollect.get_machine_making_reflux("视频号", "单点视频", "视频号历史", "相似溯源", "视频H品类账号")
  297. if user == None:
  298. return
  299. user = [item[0] for item in user]
  300. Feishu.bot("xinxin", '视频号溯源提醒', f'今日需溯源账号共{len(user)}条', 'xinxin')
  301. for uid in user:
  302. if re.match(r'^[A-Za-z0-9]+$', uid):
  303. # 匹配成功,进行下一次循环
  304. continue
  305. Feishu.bot("xinxin", '视频号溯源提醒', f'开始溯源账号名称{uid}', 'xinxin')
  306. list = cls.get_nrxs_list(uid)
  307. print(list)
if __name__ == '__main__':
    # Script entry point: run the daily similarity-tracing job.
    SphNrxs.sph_nrxs_data()