feishu_form.py

# -*- coding: utf-8 -*-
import json
import os
import sys

# The project root must be on sys.path before the common.* imports below.
sys.path.append(os.getcwd())

from common.sql_help import sqlCollect
from common.feishu_utils import Feishu


class Material:

    @classmethod
    def feishu_list(cls):
        """Fetch the list of all owners from the summary sheet."""
        summary = Feishu.get_values_batch("summary", "bc154d")
        owner_list = []
        for row in summary[1:]:
            mark = row[0]
            name = row[1]
            feishu_id = row[3]
            feishu_sheet = row[4]
            cookie_sheet = row[5]
            owner = {
                "mark": mark,
                "name": name,
                "feishu_id": feishu_id,
                "feishu_sheet": feishu_sheet,
                "cookie_sheet": cookie_sheet,
            }
            if mark:
                owner_list.append(owner)
            else:
                # The first row without a mark ends the table.
                return owner_list
        return owner_list

    @classmethod
    def get_sph_user(cls):
        """Fetch the 视频号 user IDs whose channel passes the DB check."""
        data = Feishu.get_values_batch("GPbhsb5vchAN3qtzot6cu1f0n1c", "cc7ef0")
        user_data_list = []
        try:
            for row in data[1:]:
                users = str(row[2])
                if users and users != 'None':
                    user_list = users.split(',') if ',' in users else [users]
                    for user in user_list:
                        status = sqlCollect.sph_channel_user(user)
                        if status:
                            user_data_list.append(user)
                else:
                    # The first empty cell ends the table.
                    return user_data_list
            return user_data_list
        except Exception:
            return user_data_list
  51. """
  52. 获取对应负责人任务明细
  53. """
  54. @classmethod
  55. def get_task_data(cls, feishu_id, feishu_sheet):
  56. data = Feishu.get_values_batch(feishu_id, feishu_sheet)
  57. processed_list = []
  58. try:
  59. for row in data[1:]:
  60. channel_id = row[1]
  61. channel_url = str(row[2])
  62. piaoquan_id = row[3]
  63. number = row[4]
  64. video_share = row[5]
  65. video_ending = row[6]
  66. voice = row[7]
  67. crop_tool = row[8]
  68. gg_duration = row[9]
  69. title = row[10]
  70. if channel_url == None or channel_url == "" or len(channel_url) == 0:
  71. continue
  72. try:
  73. ls_number = int(row[11])
  74. except:
  75. ls_number = None
  76. def count_items(item, separator):
  77. if item and item not in {'None', ''}:
  78. return len(item.split(separator))
  79. return 0
  80. video_id_total = count_items(str(channel_url), ',')
  81. title_total = count_items(str(title), '/')
  82. video_ending_total = count_items(str(video_ending), ',')
  83. values = [channel_id, video_id_total, piaoquan_id, video_share, video_ending_total, crop_tool, gg_duration, title_total]
  84. filtered_values = [str(value) for value in values if value is not None and value != "None"]
  85. task_mark = "_".join(map(str, filtered_values))
  86. if piaoquan_id and piaoquan_id not in {'None', ''}:
  87. if ',' in channel_url:
  88. channel_url = channel_url.split(',')
  89. else:
  90. channel_url = [channel_url]
  91. for user in channel_url:
  92. number_dict = {
  93. "task_mark": task_mark,
  94. "channel_id": channel_id,
  95. "channel_url": user,
  96. "piaoquan_id": piaoquan_id,
  97. "number": number,
  98. "title": title,
  99. "video_share": video_share,
  100. "video_ending": video_ending,
  101. "crop_total": crop_tool,
  102. "gg_duration_total": gg_duration,
  103. "voice": voice
  104. }
  105. processed_list.append(json.dumps(number_dict, ensure_ascii=False))
  106. if channel_id == "抖音" or channel_id == "快手" or channel_id == "视频号":
  107. if ls_number and ls_number not in {'None', ''}:
  108. if channel_id == "抖音":
  109. new_channel_id = "抖音历史"
  110. if channel_id == "快手":
  111. new_channel_id = "快手历史"
  112. if channel_id == "视频号":
  113. new_channel_id = "视频号历史"
  114. values1 = [new_channel_id, video_id_total, piaoquan_id, video_share, video_ending_total, crop_tool,
  115. gg_duration, title_total]
  116. filtered_values1 = [str(value) for value in values1 if value is not None and value != "None"]
  117. task_mark1 = "_".join(map(str, filtered_values1))
  118. number_dict = {
  119. "task_mark": task_mark1,
  120. "channel_id": new_channel_id,
  121. "channel_url": user,
  122. "piaoquan_id": piaoquan_id,
  123. "number": ls_number,
  124. "title": title,
  125. "video_share": video_share,
  126. "video_ending": video_ending,
  127. "crop_total": crop_tool,
  128. "gg_duration_total": gg_duration,
  129. "voice": voice
  130. }
  131. processed_list.append(json.dumps(number_dict, ensure_ascii=False))
  132. else:
  133. return processed_list
  134. return processed_list
  135. except:
  136. return processed_list
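
    # Example of the fingerprint built above (values are hypothetical, for
    # illustration only): a row with channel "抖音", two URLs, 票圈 ID 123,
    # share flag "是", one ending, crop tool "竖屏", duration 30 and three
    # titles produces
    #     task_mark == "抖音_2_123_是_1_竖屏_30_3"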
  137. """
  138. 获取对应片尾+srt
  139. """
  140. @classmethod
  141. def get_pwsrt_data(cls, feishu_id, feishu_sheet, video_ending):
  142. data = Feishu.get_values_batch(feishu_id, feishu_sheet)
  143. for row in data[1:]:
  144. pw_mark = row[0]
  145. pw_id = row[1]
  146. pw_srt = row[2]
  147. if pw_id != 'None' and pw_id != '' and pw_id != None:
  148. if pw_mark == video_ending:
  149. number = {"pw_id": pw_id, "pw_srt": pw_srt}
  150. return number
  151. return ''
  152. """
  153. 获取对应固定字幕
  154. """
  155. @classmethod
  156. def get_pzsrt_data(cls, feishu_id, feishu_sheet, video_share_name):
  157. data = Feishu.get_values_batch(feishu_id, feishu_sheet)
  158. for row in data[1:]:
  159. pz_mark = row[0]
  160. pz_zm = row[1]
  161. if pz_zm != 'None' and pz_zm != '' and pz_zm != None:
  162. if pz_mark == video_share_name:
  163. return pz_zm
  164. return ''
  165. """
  166. 获取 cookie 信息
  167. """
  168. @classmethod
  169. def get_cookie_data(cls, feishu_id, cookie_sheet, channel):
  170. data = Feishu.get_values_batch(feishu_id, cookie_sheet)
  171. for row in data[1:]:
  172. channel_mask = row[0]
  173. cookie = row[1]
  174. if channel_mask == channel:
  175. return cookie
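

if __name__ == "__main__":
    # Hedged usage sketch, not part of the original file: it only shows how
    # the helpers above are meant to chain together, and it requires valid
    # Feishu credentials in common.feishu_utils to actually run.
    owners = Material.feishu_list()
    for owner in owners:
        tasks = Material.get_task_data(owner["feishu_id"], owner["feishu_sheet"])
        for task_json in tasks:
            task = json.loads(task_json)  # each task is a JSON string
            cookie = Material.get_cookie_data(owner["feishu_id"],
                                              owner["cookie_sheet"],
                                              task["channel_id"])
            print(task["task_mark"], task["channel_id"], bool(cookie))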