# search_key.py
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/2/10
  4. """
  5. 获取微信指数小程序请求参数:search_key
  6. 1. 启动 WinAppDriver.exe
  7. 2. 启动 Charles.exe:
  8. 2.1 选中 Proxy - Windows Proxy
  9. 2.2 选中 Tools - Auto Save - Enable Auto Save
  10. 3. 启动 Python 脚本:
  11. 3.1 cd D:\piaoquan_crawler
  12. 3.2 python .\weixinzhishu\weixinzhishu_main\search_key.py
  13. 每分钟获取最新search_key,写入飞书: https://w42nne6hzg.feishu.cn/sheets/shtcnqhMRUGunIfGnGXMOBYiy4K?sheet=sVL74k
  14. """
  15. import json
  16. import os
  17. import sys
  18. import time
  19. import psutil
  20. from appium import webdriver
  21. from selenium.webdriver.common.by import By
  22. sys.path.append(os.getcwd())
  23. from common.common import Common
  24. from common.feishu import Feishu
  25. class Searchkey:
  26. @classmethod
  27. def start_wechat(cls, log_type, crawler):
  28. try:
  29. Common.logger(log_type, crawler).info('启动"微信"')
  30. desired_caps = {'app': r"C:\Program Files (x86)\Tencent\WeChat\WeChat.exe"}
  31. driver = webdriver.Remote(
  32. command_executor='http://127.0.0.1:4723',
  33. desired_capabilities=desired_caps)
  34. driver.implicitly_wait(10)
  35. Common.logger(log_type, crawler).info('选择对话人"微信同步助手"')
  36. driver.find_elements(By.NAME, '微信同步助手')[0].click()
  37. time.sleep(1)
  38. Common.logger(log_type, crawler).info('点击"微信指数"')
  39. driver.find_elements(By.NAME, '消息')[-1].click()
  40. time.sleep(1)
  41. Common.logger(log_type, crawler).info('退出微信')
  42. driver.quit()
  43. time.sleep(1)
  44. Common.logger(log_type, crawler).info('关闭微信指数')
  45. weixinzhishu_driver = cls.close_weixinzhishu(log_type, crawler)
  46. weixinzhishu_driver.find_elements(By.NAME, '关闭')[-1].click()
  47. except Exception as e:
  48. Common.logger(log_type, crawler).error(f'start_wechat异常:{e}\n')
  49. @classmethod
  50. def close_weixinzhishu(cls, log_type, crawler, app_name='微信指数'):
  51. """
  52. *通过名字找到windowsdriver
  53. *通过窗口名称,从桌面对象获取webdriver对象
  54. """
  55. new_caps = {'app': "Root"}
  56. try:
  57. new_driver = webdriver.Remote(command_executor='http://127.0.0.1:4723', desired_capabilities=new_caps)
  58. windowElement = new_driver.find_elements(By.NAME, app_name)
  59. if len(windowElement) != 0:
  60. newWindowHandle = hex(int(windowElement[0].get_attribute("NativeWindowHandle")))
  61. app_caps = {"appTopLevelWindow": newWindowHandle}
  62. app_driver = webdriver.Remote(command_executor='http://127.0.0.1:4723',
  63. desired_capabilities=app_caps)
  64. return app_driver
  65. except Exception as e:
  66. Common.logger(log_type, crawler).error(f"close_weixinzhishu异常:{e}\n")
  67. @classmethod
  68. def kill_pid(cls, log_type, crawler):
  69. try:
  70. os.system('chcp 65001') # 将cmd的显示字符编码从默认的GBK改为UTF-8
  71. list_process = list()
  72. pid_list = psutil.pids()
  73. for sub_pid in pid_list:
  74. try:
  75. process_info = psutil.Process(sub_pid)
  76. print(process_info)
  77. if process_info.name() == 'WeChatAppEx.exe' \
  78. or process_info.name() == 'WeChatOCR.exe' \
  79. or process_info.name() == 'WeChatPlayer.exe' \
  80. or process_info.name() == 'WeChatUtility.exe':
  81. list_process.append(sub_pid)
  82. except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
  83. pass
  84. for pid in list_process:
  85. os.system('taskkill /f /pid ' + str(pid))
  86. except Exception as e:
  87. Common.logger(log_type, crawler).error(f'kill_pid异常:{e}\n')
  88. @classmethod
  89. def get_search_key(cls, log_type, crawler):
  90. try:
  91. # charles 抓包文件保存目录
  92. chlsfile_path = f"./{crawler}/{crawler}_chlsfiles/"
  93. if len(os.listdir(chlsfile_path)) == 0:
  94. Common.logger(log_type, crawler).info("chlsfile文件夹为空,等待10s")
  95. cls.start_wechat(log_type, crawler)
  96. time.sleep(10)
  97. cls.get_search_key(log_type, crawler)
  98. else:
  99. Common.logger(log_type, crawler).info(f"chlsfile_list:{sorted(os.listdir(chlsfile_path))}")
  100. # 获取最新的 chlsfile
  101. chlsfile = sorted(os.listdir(chlsfile_path))[-1]
  102. # 分离文件名与扩展名
  103. new_file = os.path.splitext(chlsfile)
  104. # 重命名文件后缀
  105. os.rename(os.path.join(chlsfile_path, chlsfile),
  106. os.path.join(chlsfile_path, new_file[0] + ".txt"))
  107. with open(f"{chlsfile_path}{new_file[0]}.txt", encoding='utf-8-sig', errors='ignore') as f:
  108. contents = json.load(f, strict=False)
  109. if "search.weixin.qq.com" not in [text['host'] for text in contents]:
  110. return "未找到search_key"
  111. else:
  112. for content in contents:
  113. if content["host"] == "search.weixin.qq.com" and content["path"] == "/cgi-bin/wxaweb/wxindexgetusergroup":
  114. # print(f"content:{content}")
  115. text = content['request']['body']['text']
  116. search_key = json.loads(text)['search_key']
  117. openid = json.loads(text)['openid']
  118. return search_key, openid
  119. except Exception as e:
  120. Common.logger(log_type, crawler).exception(f"get_search_key异常:{e}\n")
  121. return None
  122. @classmethod
  123. def remove_file(cls, log_type, crawler):
  124. try:
  125. all_file_path = f"./{crawler}/{crawler}_chlsfiles/"
  126. if not os.path.exists(all_file_path):
  127. os.mkdir(all_file_path)
  128. all_file = os.listdir(f"./{crawler}/{crawler}_chlsfiles/")
  129. for file in all_file:
  130. os.remove(f"./{crawler}/{crawler}_chlsfiles/{file}")
  131. except Exception as e:
  132. Common.logger(log_type, crawler).error(f"remove_file异常:{e}\n")
  133. @classmethod
  134. def del_search_key_from_feishu(cls, log_type, crawler):
  135. try:
  136. sheet = Feishu.get_values_batch(log_type, crawler, 'sVL74k')
  137. if len(sheet) <= 21:
  138. # print('<=20行')
  139. return
  140. else:
  141. Feishu.dimension_range(log_type, crawler, 'sVL74k', 'ROWS', 22, 22)
  142. cls.del_search_key_from_feishu(log_type, crawler)
  143. except Exception as e:
  144. Common.logger(log_type, crawler).error(f"del_search_key_from_feishu异常:{e}\n")
  145. @classmethod
  146. def write_search_key_to_feishu(cls, log_type, crawler):
  147. Common.logger(log_type, crawler).info('清除 chlsfiles 文件夹')
  148. cls.remove_file(log_type, crawler)
  149. Common.logger(log_type, crawler).info('启动微信指数小程序')
  150. cls.start_wechat(log_type, crawler)
  151. Common.logger(log_type, crawler).info('获取 search_key')
  152. while True:
  153. search_key = cls.get_search_key(log_type, crawler)
  154. if search_key is None or search_key == "未找到search_key":
  155. time.sleep(3)
  156. Common.logger(log_type, crawler).info('未找到search_key,重启打开微信指数,获取 search_key')
  157. cls.start_wechat(log_type, crawler)
  158. cls.get_search_key(log_type, crawler)
  159. else:
  160. Common.logger(log_type, crawler).info(f'已获取 search_key,openid:{search_key}')
  161. Feishu.insert_columns(log_type, crawler, 'sVL74k', 'ROWS', 1, 2)
  162. time.sleep(1)
  163. time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time())))
  164. Feishu.update_values(log_type, crawler, 'sVL74k', 'A2:Z2', [[time_str, search_key[0], search_key[-1]]])
  165. cls.del_search_key_from_feishu(log_type, crawler)
  166. Common.logger(log_type, crawler).info(f"search_key:{search_key}写入飞书表成功\n")
  167. return
  168. if __name__ == '__main__':
  169. while True:
  170. Searchkey.write_search_key_to_feishu('searchkey', 'weixinzhishu')
  171. Common.logger('searchkey', 'weixinzhishu').info('休眠 1 分钟')
  172. time.sleep(60)
  173. # Searchkey.start_wechat('searchkey', 'weixinzhishu')