# shipinhao_get_url.py
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2022/9/1
  4. import json
  5. import os
  6. import sys
  7. import time
  8. # import atomacos
  9. from appium import webdriver
  10. from selenium.webdriver.common.by import By
  11. sys.path.append(os.getcwd())
  12. # from crawler_shipinhao.main.common import Common
  13. # from crawler_shipinhao.main.feishu_lib import Feishu
  14. from main.feishu_lib import Feishu
  15. from main.common import Common
  16. class GetUrl:
  17. @classmethod
  18. def click_video(cls, log_type, video_title):
  19. Common.logger(log_type).info('启动"微信"')
  20. desired_caps = {'app': r"C:\Program Files (x86)\Tencent\WeChat\WeChat.exe"}
  21. driver = webdriver.Remote(
  22. command_executor='http://127.0.0.1:4723',
  23. desired_capabilities=desired_caps)
  24. driver.implicitly_wait(10)
  25. Common.logger(log_type).info('点击"爬虫群"')
  26. print('点击"爬虫群"')
  27. driver.find_element(By.NAME, '爬虫群').click()
  28. Common.logger(log_type).info('点击视频:{}', video_title)
  29. print(f'点击视频{video_title}')
  30. driver.find_element(By.NAME, video_title).click()
  31. time.sleep(5)
  32. print('退出')
  33. driver.quit()
  34. @classmethod
  35. def get_url(cls, log_type):
  36. try:
  37. # charles 抓包文件保存目录
  38. # charles_file_dir = r"./crawler_kanyikan_recommend/chlsfiles/"
  39. charles_file_dir = r"./chlsfiles/"
  40. if int(len(os.listdir(charles_file_dir))) == 1:
  41. Common.logger(log_type).info("未找到chlsfile文件,等待60s")
  42. time.sleep(60)
  43. else:
  44. # 目标文件夹下所有文件
  45. all_file = sorted(os.listdir(charles_file_dir))
  46. # 获取到目标文件
  47. old_file = all_file[-1]
  48. # 分离文件名与扩展名
  49. new_file = os.path.splitext(old_file)
  50. # 重命名文件后缀
  51. os.rename(os.path.join(charles_file_dir, old_file),
  52. os.path.join(charles_file_dir, new_file[0] + ".txt"))
  53. with open(charles_file_dir + new_file[0] + ".txt", encoding='utf-8-sig', errors='ignore') as f:
  54. contents = json.load(f, strict=False)
  55. video_url_list = []
  56. cover_url_list = []
  57. if "finder.video.qq.com" in [text['host'] for text in contents]:
  58. for text in contents:
  59. if text["host"] == "finder.video.qq.com" and text["path"] == "/251/20302/stodownload":
  60. video_url_list.append(text)
  61. elif text["host"] == "finder.video.qq.com" and text["path"] == "/251/20304/stodownload":
  62. cover_url_list.append(text)
  63. video_url = video_url_list[0]['host']+video_url_list[0]['path']+'?'+video_url_list[0]['query']
  64. cover_url = cover_url_list[0]['host']+cover_url_list[0]['path']+'?'+cover_url_list[0]['query']
  65. head_url = cover_url
  66. return video_url, cover_url, head_url
  67. else:
  68. Common.logger(log_type).info("未找到 url,10s后重新获取")
  69. time.sleep(10)
  70. cls.get_url(log_type)
  71. except Exception as e:
  72. Common.logger(log_type).exception("get_url异常:{}", e)
  73. return None
  74. @classmethod
  75. def write_url(cls, log_type):
  76. while True:
  77. if Feishu.get_values_batch(log_type, 'shipinhao', 'FSDlBy')[1][11] is None:
  78. # Common.logger(log_type).info('开始点击分享的视频')
  79. print(f"开始点击分享的视频:{Feishu.get_values_batch(log_type, 'shipinhao', 'FSDlBy')[1][2]}")
  80. cls.click_video(log_type, Feishu.get_values_batch(log_type, 'shipinhao', 'FSDlBy')[1][2])
  81. time.sleep(60)
  82. Common.logger(log_type).info('获取视频头像/封面/播放地址')
  83. print('获取视频头像/封面/播放地址')
  84. urls = cls.get_url(log_type)
  85. Feishu.update_values(log_type, 'shipinhao', '', 'J2:L2', [[urls[2], urls[1], urls[0]]])
  86. Common.logger(log_type).info('视频地址信息写入飞书成功\n')
  87. break
  88. else:
  89. break
  90. if __name__ == '__main__':
  91. GetUrl.write_url('recommend')