# search_key.py (7.5 KB)
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/2/10
  4. """
  5. 获取微信指数小程序请求参数:search_key
  6. 1. 启动 WinAppDriver.exe
  7. 2. 启动 Charles.exe:
  8. 2.1 选中 Proxy - Windows Proxy
  9. 2.2 选中 Tools - Auto Save - Enable Auto Save
  10. 3. 启动 Python 脚本:
  11. 3.1 cd D:\piaoquan_crawler
  12. 3.2 python .\weixinzhishu\weixinzhishu_main\search_key.py
  13. 每分钟获取最新search_key,写入飞书: https://w42nne6hzg.feishu.cn/sheets/shtcnqhMRUGunIfGnGXMOBYiy4K?sheet=sVL74k
  14. """
  15. import json
  16. import os
  17. import shutil
  18. import sys
  19. import time
  20. import psutil
  21. from appium import webdriver
  22. from selenium.webdriver.common.by import By
  23. sys.path.append(os.getcwd())
  24. from common.common import Common
  25. from common.feishu import Feishu
  26. class Searchkey:
  27. @classmethod
  28. def start_wechat(cls, log_type, crawler):
  29. try:
  30. # Common.logger(log_type, crawler).info('启动"微信"')
  31. desired_caps = {'app': r"C:\Program Files (x86)\Tencent\WeChat\WeChat.exe"}
  32. driver = webdriver.Remote(
  33. command_executor='http://127.0.0.1:4723',
  34. desired_capabilities=desired_caps)
  35. driver.implicitly_wait(10)
  36. # Common.logger(log_type, crawler).info('点击微信指数')
  37. driver.find_elements(By.NAME, '消息')[-1].click()
  38. time.sleep(2)
  39. cls.kill_pid(log_type, crawler)
  40. time.sleep(2)
  41. driver.quit()
  42. time.sleep(2)
  43. cls.rmtree_WMPFRuntime()
  44. except Exception as e:
  45. Common.logger(log_type, crawler).error(f'start_wechat异常:{e}\n')
  46. @classmethod
  47. def kill_pid(cls, log_type, crawler):
  48. try:
  49. os.system('chcp 65001') # 将cmd的显示字符编码从默认的GBK改为UTF-8
  50. list_process = list()
  51. pid_list = psutil.pids()
  52. for sub_pid in pid_list:
  53. try:
  54. process_info = psutil.Process(sub_pid)
  55. print(process_info)
  56. if process_info.name() == 'WeChatAppEx.exe' \
  57. or process_info.name() == 'WeChatOCR.exe' \
  58. or process_info.name() == 'WeChatPlayer.exe' \
  59. or process_info.name() == 'WeChatUtility.exe':
  60. list_process.append(sub_pid)
  61. except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
  62. pass
  63. for pid in list_process:
  64. os.system('taskkill /f /pid ' + str(pid))
  65. except Exception as e:
  66. Common.logger(log_type, crawler).error(f'kill_pid异常:{e}\n')
  67. @classmethod
  68. def rmtree_WMPFRuntime(cls):
  69. WMPFRuntime_path = r"C:\Users\guosh\AppData\Roaming\Tencent\WeChat\XPlugin\Plugins\WMPFRuntime"
  70. shutil.rmtree(WMPFRuntime_path)
  71. if not os.path.exists(WMPFRuntime_path):
  72. os.mkdir(WMPFRuntime_path)
  73. @classmethod
  74. def get_search_key(cls, log_type, crawler):
  75. try:
  76. # charles 抓包文件保存目录
  77. chlsfile_path = f"./{crawler}/{crawler}_chlsfiles/"
  78. if len(os.listdir(chlsfile_path)) == 0:
  79. Common.logger(log_type, crawler).info("chlsfile文件夹为空,等待10s")
  80. cls.start_wechat(log_type, crawler)
  81. time.sleep(10)
  82. cls.get_search_key(log_type, crawler)
  83. else:
  84. Common.logger(log_type, crawler).info(f"chlsfile_list:{sorted(os.listdir(chlsfile_path))}")
  85. # 获取最新的 chlsfile
  86. chlsfile = sorted(os.listdir(chlsfile_path))[-1]
  87. # 分离文件名与扩展名
  88. new_file = os.path.splitext(chlsfile)
  89. # 重命名文件后缀
  90. os.rename(os.path.join(chlsfile_path, chlsfile),
  91. os.path.join(chlsfile_path, new_file[0] + ".txt"))
  92. with open(f"{chlsfile_path}{new_file[0]}.txt", encoding='utf-8-sig', errors='ignore') as f:
  93. contents = json.load(f, strict=False)
  94. if "search.weixin.qq.com" not in [text['host'] for text in contents]:
  95. return "未找到search_key"
  96. else:
  97. for content in contents:
  98. if content["host"] == "search.weixin.qq.com" and content["path"] == "/cgi-bin/wxaweb/wxindexgetusergroup":
  99. # print(f"content:{content}")
  100. text = content['request']['body']['text']
  101. search_key = json.loads(text)['search_key']
  102. openid = json.loads(text)['openid']
  103. return search_key, openid
  104. except Exception as e:
  105. Common.logger(log_type, crawler).exception(f"get_search_key异常:{e}\n")
  106. return None
  107. @classmethod
  108. def remove_file(cls, log_type, crawler):
  109. try:
  110. all_file_path = f"./{crawler}/{crawler}_chlsfiles/"
  111. if not os.path.exists(all_file_path):
  112. os.mkdir(all_file_path)
  113. all_file = os.listdir(f"./{crawler}/{crawler}_chlsfiles/")
  114. for file in all_file:
  115. os.remove(f"./{crawler}/{crawler}_chlsfiles/{file}")
  116. except Exception as e:
  117. Common.logger(log_type, crawler).error(f"remove_file异常:{e}\n")
  118. @classmethod
  119. def del_search_key_from_feishu(cls, log_type, crawler):
  120. try:
  121. sheet = Feishu.get_values_batch(log_type, crawler, 'sVL74k')
  122. if len(sheet) <= 21:
  123. # print('<=20行')
  124. return
  125. else:
  126. Feishu.dimension_range(log_type, crawler, 'sVL74k', 'ROWS', 22, 22)
  127. cls.del_search_key_from_feishu(log_type, crawler)
  128. except Exception as e:
  129. Common.logger(log_type, crawler).error(f"del_search_key_from_feishu异常:{e}\n")
  130. @classmethod
  131. def write_search_key_to_feishu(cls, log_type, crawler):
  132. Common.logger(log_type, crawler).info('清除 chlsfiles 文件夹')
  133. cls.remove_file(log_type, crawler)
  134. Common.logger(log_type, crawler).info('启动微信指数小程序')
  135. cls.start_wechat(log_type, crawler)
  136. Common.logger(log_type, crawler).info('获取 search_key')
  137. while True:
  138. search_key = cls.get_search_key(log_type, crawler)
  139. if search_key is None or search_key == "未找到search_key":
  140. time.sleep(3)
  141. Common.logger(log_type, crawler).info('未找到search_key,重启打开微信指数,获取 search_key')
  142. cls.start_wechat(log_type, crawler)
  143. cls.get_search_key(log_type, crawler)
  144. else:
  145. Common.logger(log_type, crawler).info(f'已获取 search_key,openid:{search_key}')
  146. Feishu.insert_columns(log_type, crawler, 'sVL74k', 'ROWS', 1, 2)
  147. time.sleep(1)
  148. time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time())))
  149. Feishu.update_values(log_type, crawler, 'sVL74k', 'A2:Z2', [[time_str, search_key[0], search_key[-1]]])
  150. cls.del_search_key_from_feishu(log_type, crawler)
  151. Common.logger(log_type, crawler).info(f"search_key:{search_key}写入飞书表成功\n")
  152. return
  153. if __name__ == '__main__':
  154. # Searchkey.kill_pid('searchkey', 'weixinzhishu')
  155. while True:
  156. Searchkey.write_search_key_to_feishu('searchkey', 'weixinzhishu')
  157. Common.logger('searchkey', 'weixinzhishu').info('休眠 1 分钟')
  158. time.sleep(60)