# search_key.py
# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/2/10
"""
Fetch the WeChat Index mini-program request parameter ``search_key``.

Operating procedure (Windows):
1. Start WinAppDriver.exe
2. Start Charles.exe:
   2.1 Enable Proxy - Windows Proxy
   2.2 Enable Tools - Auto Save - Enable Auto Save
3. Run this script:
   3.1 cd D:\piaoquan_crawler
   3.2 python .\weixinzhishu\weixinzhishu_main\search_key.py
Every minute the newest search_key is fetched and written to the Feishu sheet:
https://w42nne6hzg.feishu.cn/sheets/shtcnqhMRUGunIfGnGXMOBYiy4K?sheet=sVL74k
"""
import json
import os
import sys
import time

import psutil
from appium import webdriver
from selenium.webdriver.common.by import By

sys.path.append(os.getcwd())  # must precede the project-local imports below
from common.common import Common
from common.feishu import Feishu
  25. class Searchkey:
  26. @classmethod
  27. def start_wechat(cls, log_type, crawler):
  28. try:
  29. # Common.logger(log_type, crawler).info('启动"微信"')
  30. desired_caps = {'app': r"C:\Program Files (x86)\Tencent\WeChat\WeChat.exe"}
  31. driver = webdriver.Remote(
  32. command_executor='http://127.0.0.1:4723',
  33. desired_capabilities=desired_caps)
  34. driver.implicitly_wait(10)
  35. # Common.logger(log_type, crawler).info('点击微信指数')
  36. driver.find_elements(By.NAME, '消息')[-1].click()
  37. time.sleep(1)
  38. time.sleep(1)
  39. cls.kill_pid(log_type, crawler)
  40. driver.quit()
  41. except Exception as e:
  42. Common.logger(log_type, crawler).error(f'start_wechat异常:{e}\n')
  43. @classmethod
  44. def kill_pid(cls, log_type, crawler):
  45. try:
  46. os.system('chcp 65001') # 将cmd的显示字符编码从默认的GBK改为UTF-8
  47. list_process = list()
  48. pid_list = psutil.pids()
  49. for sub_pid in pid_list:
  50. try:
  51. process_info = psutil.Process(sub_pid)
  52. print(process_info)
  53. if process_info.name() == 'WeChatAppEx.exe' \
  54. or process_info.name() == 'WeChatOCR.exe' \
  55. or process_info.name() == 'WeChatPlayer.exe' \
  56. or process_info.name() == 'WeChatUtility.exe':
  57. list_process.append(sub_pid)
  58. except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
  59. pass
  60. for pid in list_process:
  61. os.system('taskkill /f /pid ' + str(pid))
  62. except Exception as e:
  63. Common.logger(log_type, crawler).error(f'kill_pid异常:{e}\n')
  64. @classmethod
  65. def get_search_key(cls, log_type, crawler):
  66. try:
  67. # charles 抓包文件保存目录
  68. chlsfile_path = f"./{crawler}/{crawler}_chlsfiles/"
  69. if len(os.listdir(chlsfile_path)) == 0:
  70. Common.logger(log_type, crawler).info("chlsfile文件夹为空,等待10s")
  71. cls.start_wechat(log_type, crawler)
  72. time.sleep(10)
  73. cls.get_search_key(log_type, crawler)
  74. else:
  75. Common.logger(log_type, crawler).info(f"chlsfile_list:{sorted(os.listdir(chlsfile_path))}")
  76. # 获取最新的 chlsfile
  77. chlsfile = sorted(os.listdir(chlsfile_path))[-1]
  78. # 分离文件名与扩展名
  79. new_file = os.path.splitext(chlsfile)
  80. # 重命名文件后缀
  81. os.rename(os.path.join(chlsfile_path, chlsfile),
  82. os.path.join(chlsfile_path, new_file[0] + ".txt"))
  83. with open(f"{chlsfile_path}{new_file[0]}.txt", encoding='utf-8-sig', errors='ignore') as f:
  84. contents = json.load(f, strict=False)
  85. if "search.weixin.qq.com" not in [text['host'] for text in contents]:
  86. return "未找到search_key"
  87. else:
  88. for content in contents:
  89. if content["host"] == "search.weixin.qq.com" and content["path"] == "/cgi-bin/wxaweb/wxindexgetusergroup":
  90. # print(f"content:{content}")
  91. text = content['request']['body']['text']
  92. search_key = json.loads(text)['search_key']
  93. openid = json.loads(text)['openid']
  94. return search_key, openid
  95. except Exception as e:
  96. Common.logger(log_type, crawler).exception(f"get_search_key异常:{e}\n")
  97. return None
  98. @classmethod
  99. def remove_file(cls, log_type, crawler):
  100. try:
  101. all_file_path = f"./{crawler}/{crawler}_chlsfiles/"
  102. if not os.path.exists(all_file_path):
  103. os.mkdir(all_file_path)
  104. all_file = os.listdir(f"./{crawler}/{crawler}_chlsfiles/")
  105. for file in all_file:
  106. os.remove(f"./{crawler}/{crawler}_chlsfiles/{file}")
  107. except Exception as e:
  108. Common.logger(log_type, crawler).error(f"remove_file异常:{e}\n")
  109. @classmethod
  110. def del_search_key_from_feishu(cls, log_type, crawler):
  111. try:
  112. sheet = Feishu.get_values_batch(log_type, crawler, 'sVL74k')
  113. if len(sheet) <= 21:
  114. # print('<=20行')
  115. return
  116. else:
  117. Feishu.dimension_range(log_type, crawler, 'sVL74k', 'ROWS', 22, 22)
  118. cls.del_search_key_from_feishu(log_type, crawler)
  119. except Exception as e:
  120. Common.logger(log_type, crawler).error(f"del_search_key_from_feishu异常:{e}\n")
  121. @classmethod
  122. def write_search_key_to_feishu(cls, log_type, crawler):
  123. Common.logger(log_type, crawler).info('清除 chlsfiles 文件夹')
  124. cls.remove_file(log_type, crawler)
  125. Common.logger(log_type, crawler).info('启动微信指数小程序')
  126. cls.start_wechat(log_type, crawler)
  127. Common.logger(log_type, crawler).info('获取 search_key')
  128. while True:
  129. search_key = cls.get_search_key(log_type, crawler)
  130. if search_key is None or search_key == "未找到search_key":
  131. time.sleep(3)
  132. Common.logger(log_type, crawler).info('未找到search_key,重启打开微信指数,获取 search_key')
  133. cls.start_wechat(log_type, crawler)
  134. cls.get_search_key(log_type, crawler)
  135. else:
  136. Common.logger(log_type, crawler).info(f'已获取 search_key,openid:{search_key}')
  137. Feishu.insert_columns(log_type, crawler, 'sVL74k', 'ROWS', 1, 2)
  138. time.sleep(1)
  139. time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time())))
  140. Feishu.update_values(log_type, crawler, 'sVL74k', 'A2:Z2', [[time_str, search_key[0], search_key[-1]]])
  141. cls.del_search_key_from_feishu(log_type, crawler)
  142. Common.logger(log_type, crawler).info(f"search_key:{search_key}写入飞书表成功\n")
  143. return
  144. if __name__ == '__main__':
  145. # Searchkey.kill_pid('searchkey', 'weixinzhishu')
  146. while True:
  147. Searchkey.write_search_key_to_feishu('searchkey', 'weixinzhishu')
  148. Common.logger('searchkey', 'weixinzhishu').info('休眠 1 分钟')
  149. time.sleep(60)