# dy_rdb_nrxs.py
  1. import json
  2. import re
  3. import time
  4. from datetime import datetime
  5. import requests
  6. from common import Feishu, Material, Common
  7. from common.sql_help import sqlCollect
  8. from xssy_channel.sph_jr_nrxs import SphNrxs
  9. class DyRdbNrxs:
  10. @classmethod
  11. def get_dy_rdb_nrxs(cls):
  12. user = sqlCollect.get_machine_making_reflux("抖音", "抖音历史", "相似溯源", "单点视频", "抖音品类账号")
  13. if user == None:
  14. return
  15. user = [item[0] for item in user]
  16. # Feishu.bot("xinxin", '抖音溯源提醒', f'今日需溯源账号共{len(user)}条', 'xinxin')
  17. for uid in user:
  18. if uid.startswith("MS"):
  19. Feishu.bot("xinxin", '视频号溯源提醒', f'开始溯源账号名称{uid}', 'xinxin')
  20. cls.get_nrxs_data(uid)
  21. @classmethod
  22. def get_nrxs_data(cls, uid):
  23. cookie = Material.get_cookie_data("KsoMsyP2ghleM9tzBfmcEEXBnXg", "U1gySe", "热点包-cookie")
  24. url = f"https://douhot.douyin.com/douhot/v1/author_analysis/fans_interest/similar_author?sec_uid={uid}"
  25. payload = {}
  26. headers = {
  27. 'accept': 'application/json, text/plain, */*',
  28. 'accept-language': 'zh-CN,zh;q=0.9',
  29. 'cookie': cookie,
  30. 'sec-ch-ua': '"Chromium";v="128", "Not;A=Brand";v="24", "Google Chrome";v="128"',
  31. 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36'
  32. }
  33. try:
  34. response = requests.request("GET", url, headers=headers, data=payload)
  35. response = response.json()
  36. code = response['code']
  37. if code == 0:
  38. status = sqlCollect.select_crawler_uesr_v3(uid)
  39. if status:
  40. pq_id = re.sub(r'[(),]', '', str(status))
  41. else:
  42. pq_id = SphNrxs.insert_number(uid, '499')
  43. if pq_id == None:
  44. return
  45. data_list = response['data']
  46. if data_list:
  47. for data in data_list:
  48. user_id = data['user_id']
  49. nick_name = data['nick_name']
  50. has_used = cls.get_rdb_data(user_id, cookie)
  51. if has_used:
  52. res = sqlCollect.insert_xssy_sph_info(uid, user_id, "抖音", nick_name, str(has_used), "", pq_id)
  53. if has_used == 0 and res == 1:
  54. current_time = datetime.now()
  55. formatted_time = current_time.strftime("%Y-%m-%d %H:%M:%S")
  56. values = [
  57. [
  58. "抖音",
  59. user_id,
  60. str(pq_id),
  61. "5",
  62. "通用-分享到群",
  63. "AI片尾引导",
  64. "zhifeng_emo,aifei,sijia,stella",
  65. "",
  66. "",
  67. "AI标题",
  68. "",
  69. f"溯源账号:{uid}",
  70. formatted_time
  71. ]
  72. ]
  73. Feishu.insert_columns("WGIYsSDdxhItBwtJ0xgc0yE7nEg", '0701bd', "ROWS", 1, 2)
  74. time.sleep(0.5)
  75. Feishu.update_values("WGIYsSDdxhItBwtJ0xgc0yE7nEg", '0701bd', "B2:Z2",
  76. values)
  77. Feishu.bot("xinxin", '抖音溯源成功提示', f'原账号:{uid},溯源到的账号:{user_id},写入账号:{pq_id}', 'xinxin')
  78. else:
  79. sqlCollect.insert_xssy_sph_info(uid, user_id, "抖音", nick_name, "1")
  80. sqlCollect.update_machine_making_reflux(uid)
  81. sqlCollect.update_machine_making_reflux(uid)
  82. else:
  83. Feishu.bot("xinxin", '热点宝提醒', f'热点宝平台 cookie 失效了,请及时更换', 'xinxin')
  84. return None
  85. except Exception as e:
  86. Feishu.bot("xinxin", '热点宝提醒', f'热点宝平台 cookie 失效了,请及时更换', 'xinxin')
  87. Common.logger("dy_rdb_nrxs").error(f"用户名:{uid}视频号加热bot异常:{e}\n")
  88. return
  89. @classmethod
  90. def get_rdb_data(cls, user_id, cookie):
  91. url = "http://8.217.190.241:8888/crawler/dou_yin/re_dian_bao/account_fans_portrait"
  92. payload = json.dumps({
  93. "account_id": user_id,
  94. "cookie": cookie
  95. })
  96. headers = {
  97. 'Content-Type': 'application/json'
  98. }
  99. response = requests.request("POST", url, headers=headers, data=payload)
  100. response = response.json()
  101. code = response['code']
  102. if code == 0:
  103. data = response['data']['data']
  104. posts = data['posts']
  105. avg_like_count = int(posts['avg_like_count'])
  106. avg_share_count = int(posts['avg_share_count'])
  107. if avg_share_count == 0:
  108. return 2
  109. if avg_like_count != 0:
  110. avg_count = avg_share_count/avg_like_count
  111. if float(avg_count) < 0.02:
  112. return 2
  113. fans = data['fans']
  114. fans_data = fans['age']['data']
  115. if fans_data:
  116. max_age_group = max(fans_data, key=lambda k: float(fans_data[k]["percentage"].strip('%')))
  117. if max_age_group == "50-":
  118. return 0
  119. else:
  120. return 3
  121. else:
  122. Feishu.bot("xinxin", '热点宝提醒', f'热点宝cookie 失效了,请及时更换', 'xinxin')
  123. return None
# Manual entry point: run one full tracing pass over all pending accounts.
if __name__ == '__main__':
    DyRdbNrxs.get_dy_rdb_nrxs()