# shipinhao_get_url.py
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2022/9/1
  4. import json
  5. import os
  6. import sys
  7. import time
  8. # import atomacos
  9. from appium import webdriver
  10. from selenium.webdriver.common.by import By
  11. sys.path.append(os.getcwd())
  12. # from crawler_shipinhao.main.common import Common
  13. # from crawler_shipinhao.main.feishu_lib import Feishu
  14. from main.feishu_lib import Feishu
  15. from main.common import Common
  16. class GetUrl:
  17. @classmethod
  18. def click_video(cls, log_type, video_title):
  19. Common.logger(log_type).info('启动"微信"')
  20. desired_caps = {'app': r"C:\Program Files (x86)\Tencent\WeChat\WeChat.exe"}
  21. driver = webdriver.Remote(
  22. command_executor='http://127.0.0.1:4723',
  23. desired_capabilities=desired_caps)
  24. driver.implicitly_wait(10)
  25. Common.logger(log_type).info('点击"聊天窗口"')
  26. print('点击"聊天窗口"')
  27. driver.find_element(By.NAME, '聊天').click()
  28. Common.logger(log_type).info('点击"爬虫群"')
  29. print('点击"爬虫群"')
  30. driver.find_element(By.NAME, '爬虫群').click()
  31. Common.logger(log_type).info('点击视频:{}', video_title)
  32. print(f'点击视频{video_title}')
  33. # driver.find_element(By.NAME, video_title[:10]).click()
  34. els = driver.find_elements(By.TAG_NAME, '列表项目')
  35. print(len(els))
  36. time.sleep(5)
  37. print('退出')
  38. driver.quit()
  39. @classmethod
  40. def get_url(cls, log_type):
  41. try:
  42. # charles 抓包文件保存目录
  43. # charles_file_dir = r"./crawler_kanyikan_recommend/chlsfiles/"
  44. charles_file_dir = r"./chlsfiles/"
  45. if int(len(os.listdir(charles_file_dir))) == 1:
  46. Common.logger(log_type).info("未找到chlsfile文件,等待60s")
  47. time.sleep(60)
  48. else:
  49. # 目标文件夹下所有文件
  50. all_file = sorted(os.listdir(charles_file_dir))
  51. # 获取到目标文件
  52. old_file = all_file[-1]
  53. # 分离文件名与扩展名
  54. new_file = os.path.splitext(old_file)
  55. # 重命名文件后缀
  56. os.rename(os.path.join(charles_file_dir, old_file),
  57. os.path.join(charles_file_dir, new_file[0] + ".txt"))
  58. with open(charles_file_dir + new_file[0] + ".txt", encoding='utf-8-sig', errors='ignore') as f:
  59. contents = json.load(f, strict=False)
  60. video_url_list = []
  61. cover_url_list = []
  62. if "finder.video.qq.com" in [text['host'] for text in contents]:
  63. for text in contents:
  64. if text["host"] == "finder.video.qq.com" and text["path"] == "/251/20302/stodownload":
  65. video_url_list.append(text)
  66. elif text["host"] == "finder.video.qq.com" and text["path"] == "/251/20304/stodownload":
  67. cover_url_list.append(text)
  68. video_url = video_url_list[0]['host']+video_url_list[0]['path']+'?'+video_url_list[0]['query']
  69. cover_url = cover_url_list[0]['host']+cover_url_list[0]['path']+'?'+cover_url_list[0]['query']
  70. head_url = cover_url
  71. return video_url, cover_url, head_url
  72. else:
  73. Common.logger(log_type).info("未找到 url,10s后重新获取")
  74. time.sleep(10)
  75. cls.get_url(log_type)
  76. except Exception as e:
  77. Common.logger(log_type).exception("get_url异常:{}", e)
  78. return None
  79. @classmethod
  80. def write_url(cls, log_type):
  81. while True:
  82. if Feishu.get_values_batch(log_type, 'shipinhao', 'FSDlBy')[1][11] is None:
  83. # Common.logger(log_type).info('开始点击分享的视频')
  84. print(f"开始点击分享的视频:{Feishu.get_values_batch(log_type, 'shipinhao', 'FSDlBy')[1][2]}")
  85. cls.click_video(log_type, Feishu.get_values_batch(log_type, 'shipinhao', 'FSDlBy')[1][2])
  86. time.sleep(60)
  87. Common.logger(log_type).info('获取视频头像/封面/播放地址')
  88. print('获取视频头像/封面/播放地址')
  89. urls = cls.get_url(log_type)
  90. Feishu.update_values(log_type, 'shipinhao', '', 'J2:L2', [[urls[2], urls[1], urls[0]]])
  91. Common.logger(log_type).info('视频地址信息写入飞书成功\n')
  92. break
  93. else:
  94. break
  95. if __name__ == '__main__':
  96. GetUrl.write_url('recommend')