dy_ls.py

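"""Douyin (抖音) history crawler.

Fetches a blogger's video list from an internal crawler service
(8.217.192.46:8889/crawler/dou_yin/blogger), filters the results by
engagement rules (shares >= 500, share/like ratio >= 0.25, duration
between 30 and 720 seconds), skips videos that have already been used,
and returns the remaining candidates.
"""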
import json
import random
import time

import requests

from common import Feishu, AliyunLogger, Material
from common.sql_help import sqlCollect

class DYLS:
    @classmethod
    def get_dy_zr_list(cls, task_mark, url_id, number, mark, channel_id, name):
        """Fetch up to `number` qualifying videos for a Douyin account via the app source."""
        url = "http://8.217.192.46:8889/crawler/dou_yin/blogger"
        video_list = []
        next_cursor = ''
        if not url_id or not url_id.strip():
            return video_list
        for _ in range(5):  # paginate at most 5 times
            try:
                payload = json.dumps({
                    "account_id": url_id,
                    "source": "app",
                    "sort": "最热",
                    "cursor": next_cursor
                })
                headers = {'Content-Type': 'application/json'}
                response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
                time.sleep(random.randint(1, 5))
                response = response.json()
                if response['code'] != 0:
                    return video_list
                data_list = response['data']
                next_cursor = str(data_list['next_cursor'])
                data = data_list['data']
                for item in data:
                    video_id = item.get('aweme_id')  # video id
                    # Check whether the video has already been used, optionally
                    # restricted to the last `day_count` days.
                    day_count = Material.get_count_restrict(channel_id)
                    if day_count:
                        status = sqlCollect.is_used_days(video_id, mark, "抖音历史", day_count)
                    else:
                        status = sqlCollect.is_used(video_id, mark, "抖音历史")
                    # Alternative play URL construction (kept for reference):
                    # video_uri = item.get('video', {}).get('play_addr', {}).get('uri')
                    # ratio = f'{item.get("video", {}).get("height")}p'
                    # video_url = f'https://www.iesdouyin.com/aweme/v1/play/?video_id={video_uri}&ratio={ratio}&line=0'
                    video_url = item.get('video', {}).get('play_addr', {}).get('url_list', [None])[0]  # video link
                    digg_count = int(item.get('statistics').get('digg_count'))    # likes
                    share_count = int(item.get('statistics').get('share_count'))  # shares
                    duration = item.get('duration') / 1000  # milliseconds -> seconds
                    # Strip characters that are unsafe for file names / downstream processing.
                    old_title = item.get('desc', "").strip().replace("\n", "") \
                        .replace("/", "").replace("\\", "").replace("\r", "") \
                        .replace(":", "").replace("*", "").replace("?", "") \
                        .replace("?", "").replace('"', "").replace("<", "") \
                        .replace(">", "").replace("|", "").replace(" ", "") \
                        .replace("&NBSP", "").replace(".", "。").replace(" ", "") \
                        .replace("'", "").replace("#", "").replace("Merge", "")
                    log_data = f"user:{url_id},,video_id:{video_id},,video_url:{video_url},,original_title:{old_title},,share_count:{share_count},,digg_count:{digg_count},,duration:{duration}"
                    AliyunLogger.logging(channel_id, name, url_id, video_id, "扫描到一条视频", "2001", log_data)
                    if status:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "该视频已改造过", "2002", log_data)
                        continue
                    # Rule: shares >= 500, share/like ratio >= 0.25, duration between 30s and 720s.
                    if share_count < 500:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享小于500", "2003", log_data)
                        continue
                    video_percent = '%.2f' % (share_count / digg_count) if digg_count else '0.00'
                    if float(video_percent) < 0.25:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享/点赞小于0.25", "2003", log_data)
                        continue
                    if int(duration) < 30 or int(duration) > 720:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:时长不符合规则大于720秒/小于30秒", "2003", log_data)
                        continue
                    cover_url = item.get('video').get('cover').get('url_list')[0]  # video cover
                    all_data = {"video_id": video_id, "cover": cover_url, "video_url": video_url,
                                "rule": video_percent, "old_title": old_title}
                    video_list.append(all_data)
                    AliyunLogger.logging(channel_id, name, url_id, video_id, "符合规则等待改造", "2004", log_data)
                    if len(video_list) == int(number):
                        return video_list
                if not data_list.get('next_cursor'):  # no more pages
                    return video_list
            except Exception:
                return video_list
        return video_list

    @classmethod
    def get_dyls_list(cls, task_mark, url_id, number, mark):
        """Fetch up to `number` qualifying videos for a Douyin account via the 抖查查 source."""
        url = "http://8.217.192.46:8889/crawler/dou_yin/blogger"
        next_cursor = ""
        video_list = []
        for _ in range(10):  # paginate at most 10 times
            try:
                payload = json.dumps({
                    "account_id": url_id,
                    "source": "抖查查",
                    "cursor": next_cursor
                })
                headers = {'Content-Type': 'application/json'}
                time.sleep(random.randint(1, 5))
                response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
                response = response.json()
                data_all_list = response["data"]
                has_more = data_all_list["has_more"]
                next_cursor = str(data_all_list["next_cursor"])
                data_list = data_all_list["data"]
                for data in data_list:
                    # Other fields available: comment_count, download_count, collect_count.
                    share_count = data["share_count"]
                    good_count = data["good_count"]
                    duration = data["duration"]
                    video_id = data["video_id"]
                    old_title = data["video_desc"]
                    # Deduplication check against previously used videos.
                    status = sqlCollect.is_used(video_id, mark, "抖音")
                    if status:
                        status = sqlCollect.is_used(video_id, mark, "抖音历史")
                        if status == False:
                            continue
                    # Rule: shares >= 500, share/like ratio >= 0.25, duration between 30s and 720s.
                    video_percent = '%.2f' % (int(share_count) / int(good_count)) if int(good_count) else '0.00'
                    duration = duration / 1000  # milliseconds -> seconds
                    if int(share_count) < 500 or float(video_percent) < 0.25 or int(duration) < 30 or int(duration) > 720:
                        continue
                    video_url, image_url = cls.get_video(video_id)
                    if video_url:
                        all_data = {"video_id": video_id, "cover": image_url, "video_url": video_url,
                                    "rule": video_percent, "old_title": old_title}
                        video_list.append(all_data)
                        if len(video_list) == int(number):
                            return video_list
                    else:
                        Feishu.finish_bot("dou_yin/detail接口无法获取到视频链接",
                                          "https://open.feishu.cn/open-apis/bot/v2/hook/575ca6a1-84b4-4a2f-983b-1d178e7b16eb",
                                          "【抖音异常提示 】")
                if not has_more:
                    return video_list
            except Exception:
                return video_list
        return video_list

    @classmethod
    def get_video(cls, video_id):
        """Resolve a video_id to a playable video URL and cover image URL via dou_yin/detail."""
        url = "http://8.217.192.46:8889/crawler/dou_yin/detail"
        for _ in range(3):  # retry up to 3 times
            payload = json.dumps({"content_id": str(video_id)})
            headers = {'Content-Type': 'application/json'}
            response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
            response = response.json()
            code = response["code"]
            if code == 10000:  # treated as retryable: wait, then try again
                time.sleep(60)
                continue
            data = response["data"]["data"]
            video_url = data["video_url_list"][0]["video_url"]
            image_url = data["image_url_list"][0]["image_url"]
            return video_url, image_url
        return None, None

if __name__ == '__main__':
    # Example invocations (placeholder arguments):
    # DYLS.get_dy_zr_list("task_mark", "account_id", 1, "mark", "channel_id", "name")
    # DYLS.get_dyls_list("1", "MS4wLjABAAAA2QEvnEb7cQDAg6vZXq3j8_LlbO_DiturnV7VeybFKY4", 1, "1")
    pass