# dy_ls.py — Douyin (抖音) history crawler.
# (Removed copy/paste artifacts from a numbered page dump: file-size header
# and a fused run of display line numbers that were not part of the source.)
  1. import random
  2. import time
  3. import requests
  4. import json
  5. from common import Common, Feishu, AliyunLogger
  6. from common.sql_help import sqlCollect
  7. class DYLS:
  8. @classmethod
  9. def get_dy_zr_list(cls, task_mark, url_id, number, mark, channel_id, name):
  10. url = "http://8.217.192.46:8889/crawler/dou_yin/blogger"
  11. list = []
  12. next_cursor = ''
  13. for i in range(5):
  14. try:
  15. payload = json.dumps({
  16. "account_id": url_id,
  17. "source": "app",
  18. "sort": "最热",
  19. "cursor": next_cursor
  20. })
  21. headers = {
  22. 'Content-Type': 'application/json'
  23. }
  24. response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
  25. time.sleep(random.randint(1, 5))
  26. response = response.json()
  27. code = response['code']
  28. if code != 0:
  29. Common.logger("dy-ls").info(f"抖音历史数据获取失败,接口为/dou_yin/blogge\n")
  30. return list
  31. data_list = response['data']
  32. next_cursor = str(data_list['next_cursor'])
  33. data = data_list['data']
  34. for i in range(len(data)):
  35. video_id = data[i].get('aweme_id') # 文章id
  36. # status = sqlCollect.is_used(task_mark, video_id, mark, "抖音")
  37. # if status:
  38. status = sqlCollect.is_used(task_mark, video_id, mark, "抖音历史")
  39. video_uri = data[i].get('video', {}).get('play_addr', {}).get('uri')
  40. ratio = f'{data[i].get("video", {}).get("height")}p'
  41. # video_url = f'https://www.iesdouyin.com/aweme/v1/play/?video_id={video_uri}&ratio={ratio}&line=0'
  42. video_url = data[i].get('video', {}).get('play_addr', {}).get('url_list', [None])[0]
  43. # 视频链接
  44. digg_count = int(data[i].get('statistics').get('digg_count')) # 点赞
  45. share_count = int(data[i].get('statistics').get('share_count')) # 转发
  46. duration = data[i].get('duration')
  47. duration = duration / 1000
  48. old_title = data[i].get('desc', "").strip().replace("\n", "") \
  49. .replace("/", "").replace("\\", "").replace("\r", "") \
  50. .replace(":", "").replace("*", "").replace("?", "") \
  51. .replace("?", "").replace('"', "").replace("<", "") \
  52. .replace(">", "").replace("|", "").replace(" ", "") \
  53. .replace("&NBSP", "").replace(".", "。").replace(" ", "") \
  54. .replace("'", "").replace("#", "").replace("Merge", "")
  55. log_data = f"user:{url_id},,video_id:{video_id},,video_url:{video_url},,original_title:{old_title},,share_count:{share_count},,digg_count:{digg_count},,duration:{duration}"
  56. AliyunLogger.logging(channel_id, name, url_id, video_id, "扫描到一条视频", "2001", log_data)
  57. Common.logger("dy-ls").info(
  58. f"扫描:{task_mark},用户主页id:{url_id},视频id{video_id} ,分享:{share_count},点赞{digg_count}")
  59. if status:
  60. AliyunLogger.logging(channel_id, name, url_id, video_id, "该视频已改造过", "2002", log_data)
  61. continue
  62. video_percent = '%.2f' % (int(share_count) / int(digg_count))
  63. special = float(0.25)
  64. if int(share_count) < 500:
  65. AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享小于500", "2003", log_data)
  66. Common.logger("dy-ls").info(
  67. f"不符合规则:{task_mark},用户主页id:{url_id},视频id{video_id} ,分享:{share_count},点赞{digg_count} ,时长:{int(duration)} ")
  68. continue
  69. if float(video_percent) < special:
  70. AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享/点赞小于0.25", "2003", log_data)
  71. Common.logger("dy-ls").info(
  72. f"不符合规则:{task_mark},用户主页id:{url_id},视频id{video_id} ,分享:{share_count},点赞{digg_count} ,时长:{int(duration)} ")
  73. continue
  74. if int(duration) < 30 or int(duration) > 720:
  75. AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:时长不符合规则大于720秒/小于30秒", "2003", log_data)
  76. Common.logger("dy-ls").info(
  77. f"不符合规则:{task_mark},用户主页id:{url_id},视频id{video_id} ,分享:{share_count},点赞{digg_count} ,时长:{int(duration)} ")
  78. continue
  79. cover_url = data[i].get('video').get('cover').get('url_list')[0] # 视频封面
  80. all_data = {"video_id": video_id, "cover": cover_url, "video_url": video_url, "rule": video_percent,
  81. "old_title": old_title}
  82. list.append(all_data)
  83. AliyunLogger.logging(channel_id, name, url_id, video_id, "符合规则等待改造", "2004", log_data)
  84. if len(list) == int(number):
  85. Common.logger("dy-ls").info(f"获取抖音历史视频总数:{len(list)}\n")
  86. return list
  87. if next_cursor == False:
  88. return list
  89. except Exception as exc:
  90. Common.logger("dy-ls").info(f"抖音历史数据获取失败:{exc}\n")
  91. return list
  92. return list
  93. return list
  94. @classmethod
  95. def get_dyls_list(cls, task_mark, url_id, number, mark):
  96. next_cursor = ""
  97. for i in range(10):
  98. list = []
  99. try:
  100. # 抖查查
  101. url = "http://8.217.192.46:8889/crawler/dou_yin/blogger"
  102. payload = json.dumps({
  103. "account_id": url_id,
  104. "source": "抖查查",
  105. "cursor": next_cursor
  106. })
  107. headers = {
  108. 'Content-Type': 'application/json'
  109. }
  110. time.sleep(random.randint(1, 5))
  111. response = requests.request("POST", url, headers=headers, data=payload)
  112. response = response.json()
  113. data_all_list = response["data"]
  114. has_more = data_all_list["has_more"]
  115. next_cursor = str(data_all_list["next_cursor"])
  116. data_list = data_all_list["data"]
  117. for data in data_list:
  118. # comment_count = data["comment_count"]
  119. # download_count = data["download_count"]
  120. share_count = data["share_count"]
  121. good_count = data["good_count"]
  122. # collect_count = data["collect_count"]
  123. duration = data["duration"]
  124. video_id = data["video_id"]
  125. old_title = data["video_desc"]
  126. status = sqlCollect.is_used(video_id, mark, "抖音")
  127. if status:
  128. status = sqlCollect.is_used(video_id, mark, "抖音历史")
  129. if status == False:
  130. continue
  131. video_percent = '%.2f' % (int(share_count) / int(good_count))
  132. special = float(0.25)
  133. duration = duration / 1000
  134. if int(share_count) < 500 or float(video_percent) < special or int(duration) < 30 or int(duration) > 720:
  135. Common.logger("dy-ls").info(
  136. f"不符合规则:{task_mark},用户主页id:{url_id},视频id{video_id} ,分享:{share_count},点赞{good_count} ,时长:{int(duration)} ")
  137. continue
  138. video_url, image_url = cls.get_video(video_id)
  139. if video_url:
  140. all_data = {"video_id": video_id, "cover": image_url, "video_url": video_url, "rule": video_percent,
  141. "old_title": old_title}
  142. list.append(all_data)
  143. if len(list) == int(number):
  144. Common.logger("dy-ls").info(f"获取抖音历史视频总数:{len(list)}\n")
  145. return list
  146. else:
  147. Common.logger("dy-ls").info(f"抖音历史获取url失败")
  148. Feishu.finish_bot("dou_yin/detail接口无法获取到视频链接",
  149. "https://open.feishu.cn/open-apis/bot/v2/hook/575ca6a1-84b4-4a2f-983b-1d178e7b16eb",
  150. "【抖音异常提示 】")
  151. if has_more == False:
  152. return list
  153. except Exception as exc:
  154. Common.logger("dy-ls").info(f"抖音历史数据获取失败:{exc}\n")
  155. return list
  156. @classmethod
  157. def get_video(cls, video_id):
  158. url = "http://8.217.192.46:8889/crawler/dou_yin/detail"
  159. for i in range(3):
  160. payload = json.dumps({
  161. "content_id": str(video_id)
  162. })
  163. headers = {
  164. 'Content-Type': 'application/json'
  165. }
  166. response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
  167. response = response.json()
  168. code = response["code"]
  169. if code == 10000:
  170. time.sleep(60)
  171. data = response["data"]["data"]
  172. video_url = data["video_url_list"][0]["video_url"]
  173. image_url = data["image_url_list"][0]["image_url"]
  174. return video_url, image_url
  175. return None, None
  176. if __name__ == '__main__':
  177. DYLS.get_dy_zr_list(1,2,1,3)
  178. # DYLS.get_dyls_list("1","MS4wLjABAAAA2QEvnEb7cQDAg6vZXq3j8_LlbO_DiturnV7VeybFKY4",1,"1")