download_publish.py

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2022/9/1
import os
import random
import sys
import time

sys.path.append(os.getcwd())
from main.common import Common
from main.feishu_lib import Feishu
from main.publish import Publish


class Download:
    @classmethod
    def download_publish(cls, log_type, env):
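        """Process one row of the recommend sheet (FSDlBy): clean the title,
        download the cover and the video, save the metadata to info.txt,
        publish via Publish.upload_and_publish, record the result in the
        video-ID sheet (c77cf9), then delete the processed row."""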
        try:
            recommend_sheet = Feishu.get_values_batch(log_type, 'shipinhao', 'FSDlBy')
            for i in range(1, len(recommend_sheet)):
                # Strip characters that are unsafe in file names from the title
                download_title = recommend_sheet[i][2].strip().replace('"', '') \
                    .replace('“', '').replace('”', '').replace("\n", "") \
                    .replace("/", "").replace("\r", "").replace("#", "") \
                    .replace(".", "。").replace("\\", "").replace("&NBSP", "") \
                    .replace(":", "").replace("*", "").replace("?", "") \
                    .replace("?", "").replace('"', "").replace("<", "") \
                    .replace(">", "").replace("|", "").replace(" ", "")
                download_duration = recommend_sheet[i][3]
                download_like_cnt = recommend_sheet[i][4]
                download_share_cnt = recommend_sheet[i][5]
                download_favorite_cnt = recommend_sheet[i][6]
                download_comment_cnt = recommend_sheet[i][7]
                download_username = recommend_sheet[i][8]
                download_head_url = recommend_sheet[i][9]
                download_cover_url = recommend_sheet[i][10]
                download_video_url = recommend_sheet[i][11]
                Common.logger(log_type).info("download_title:{}", download_title)
                Common.logger(log_type).info("download_username:{}", download_username)
                Common.logger(log_type).info("download_video_url:{}", download_video_url)
                # Common.logger(log_type).info("download_vid:{}", download_vid)
                # Common.logger(log_type).info("download_play_cnt:{}", download_play_cnt)
                # Common.logger(log_type).info("download_like_cnt:{}", download_like_cnt)
                # Common.logger(log_type).info("download_duration:{}", download_duration)
                # Common.logger(log_type).info("download_userid:{}", download_userid)
                # Common.logger(log_type).info("download_head_url:{}", download_head_url)
                # Common.logger(log_type).info("download_cover_url:{}", download_cover_url)
                # Empty row: delete it and start over from the top
                if download_video_url is None or download_title is None:
                    Feishu.dimension_range(log_type, 'shipinhao', 'FSDlBy', 'ROWS', i + 1, i + 1)
                    Common.logger(log_type).info("空行,删除成功\n")
                    return
                # Already downloaded: delete the row and start over from the top
                elif str(download_title) in [x for y in Feishu.get_values_batch(log_type, 'shipinhao', 'c77cf9') for x in y]:
                    Feishu.dimension_range(log_type, 'shipinhao', 'FSDlBy', 'ROWS', i + 1, i + 1)
                    Common.logger(log_type).info("视频已下载\n")
                    return
                else:
                    # Download the cover image
                    Common.download_method(log_type=log_type, text="cover",
                                           d_name=str(download_title), d_url=str(download_cover_url))
                    # Download the video
                    Common.download_method(log_type=log_type, text="video",
                                           d_name=str(download_title), d_url=str(download_video_url))
                    # Save the video metadata to "./videos/{download_title}/info.txt"
                    with open("./videos/" + download_title
                              + "/" + "info.txt", "a", encoding="UTF-8") as f_a:
                        f_a.write('shipinhao' + str(int(time.time())) + "\n" +
                                  str(download_title) + "\n" +
                                  str(download_duration) + "\n" +
                                  str(download_favorite_cnt) + "\n" +
                                  str(download_comment_cnt) + "\n" +
                                  str(download_like_cnt) + "\n" +
                                  str(download_share_cnt) + "\n" +
                                  str(1920 * 1080) + "\n" +
                                  str(int(time.time())) + "\n" +
                                  str(download_username) + "\n" +
                                  str(download_head_url) + "\n" +
                                  str(download_video_url) + "\n" +
                                  str(download_cover_url) + "\n" +
                                  "shipinhao")
                    Common.logger(log_type).info("==========视频信息已保存至info.txt==========")
                    # Upload the video
                    Common.logger(log_type).info("开始上传视频:{}".format(download_title))
                    our_video_id = Publish.upload_and_publish(log_type, env, "play")
                    if env == 'dev':
                        our_video_link = "https://testadmin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
                    else:
                        our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
                    Common.logger(log_type).info("视频上传完成:{}", download_title)
                    # Insert a new first row into the video-ID sheet
                    Feishu.insert_columns(log_type, "shipinhao", "c77cf9", "ROWS", 1, 2)
                    # Write the video data into the new first row
                    upload_time = int(time.time())
                    values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
                               "推荐榜",
                               str(download_title),
                               our_video_link,
                               download_duration,
                               download_like_cnt,
                               download_share_cnt,
                               download_favorite_cnt,
                               download_comment_cnt,
                               str(download_username),
                               str(download_head_url),
                               str(download_cover_url),
                               str(download_video_url)]]
                    time.sleep(1)
                    Feishu.update_values(log_type, "shipinhao", "c77cf9", "F2:Z2", values)
                    # Delete the processed row from the recommend sheet ("ROWS" or "COLUMNS")
                    Feishu.dimension_range(log_type, "shipinhao", "FSDlBy", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("视频:{},下载/上传成功\n", download_title)
                    return
        except Exception as e:
            Common.logger(log_type).error("download_publish异常:{}\n", e)
            Feishu.dimension_range(log_type, "shipinhao", "FSDlBy", "ROWS", 2, 2)

    @classmethod
    def run_download_publish(cls, log_type, env):
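        """Keep calling download_publish until only the header row is left in
        the recommend sheet (FSDlBy)."""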
        try:
            while True:
                time.sleep(1)
                recommend_sheet = Feishu.get_values_batch(log_type, "shipinhao", "FSDlBy")
                # Only the header row is left, so every video has been processed
                if len(recommend_sheet) == 1:
                    Common.logger(log_type).info("下载/上传完成\n")
                    break
                else:
                    cls.download_publish(log_type, env)
                    time.sleep(random.randint(1, 3))
        except Exception as e:
            Common.logger(log_type).error("run_download_publish异常:{}", e)


if __name__ == '__main__':
    Download.run_download_publish('recommend', 'dev')