dy.py

import random
import time
import requests
import json
from common import Feishu, AliyunLogger, Material
from common.sql_help import sqlCollect
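
# DYX wraps two endpoints of an internal crawler service (8.217.192.46:8889):
# - get_dy_list pages through a Douyin blogger's latest posts and keeps those that
#   pass the dedup check and the share-count / share-like-ratio / duration rules,
#   logging every decision to AliyunLogger.
# - get_video resolves a single video id to a playable video URL and a cover image.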
class DYX:
    @classmethod
    def get_dy_list(cls, task_mark, url_id, number, mark, channel_id, name):
        url = "http://8.217.192.46:8889/crawler/dou_yin/blogger"
        video_list = []
        next_cursor = ''
        if not url_id or not url_id.strip():
            return video_list
        # Page through the blogger's feed, at most 5 pages per call.
        for i in range(5):
            try:
                payload = json.dumps({
                    "account_id": url_id,
                    "source": "app",
                    "sort_type": "最新",  # "最新" = newest first
                    "cursor": next_cursor
                })
                headers = {
                    'Content-Type': 'application/json'
                }
                response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
                time.sleep(random.randint(1, 5))
                response = response.json()
                code = response['code']
                if code != 0:
                    return video_list
                data_list = response['data']
                next_cursor = str(data_list['next_cursor'])
                data = data_list['data']
                for j in range(len(data)):
                    video_id = data[j].get('aweme_id')  # post id
                    day_count = Material.get_count_restrict(channel_id)
                    if day_count:
                        status = sqlCollect.is_used_days(video_id, mark, channel_id, day_count)
                    else:
                        status = sqlCollect.is_used(video_id, mark, channel_id)
                    video_url = data[j].get('video', {}).get('play_addr', {}).get('url_list', [None])[0]  # video link
                    digg_count = int(data[j].get('statistics').get('digg_count'))  # like count
                    share_count = int(data[j].get('statistics').get('share_count'))  # share count
                    duration = data[j].get('duration')
                    duration = duration / 1000  # milliseconds -> seconds
                    old_title = data[j].get('desc', "").strip().replace("\n", "") \
                        .replace("/", "").replace("\\", "").replace("\r", "") \
                        .replace(":", "").replace("*", "").replace("?", "") \
                        .replace("？", "").replace('"', "").replace("<", "") \
                        .replace(">", "").replace("|", "").replace(" ", "") \
                        .replace("&NBSP", "").replace(".", "。") \
                        .replace("'", "").replace("#", "").replace("Merge", "")
                    log_data = f"user:{url_id},,video_id:{video_id},,video_url:{video_url},,original_title:{old_title},,share_count:{share_count},,digg_count:{digg_count},,duration:{duration}"
                    AliyunLogger.logging(channel_id, name, url_id, video_id, "扫描到一条视频", "2001", log_data)
                    if status:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "该视频已改造过", "2002", log_data)
                        continue
                    # Share/like ratio; guard against a zero like count.
                    video_percent = '%.2f' % (share_count / digg_count) if digg_count else '0.00'
                    special = 0.15
                    if share_count < 200:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享小于200", "2003", log_data)
                        continue
                    if float(video_percent) < special:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享/点赞小于0.15", "2003", log_data)
                        continue
                    if int(duration) < 30 or int(duration) > 720:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:时长不符合规则大于720秒/小于30秒", "2003", log_data)
                        continue
                    cover_url = data[j].get('video').get('cover').get('url_list')[0]  # video cover
                    all_data = {"video_id": video_id, "cover": cover_url, "video_url": video_url,
                                "rule": video_percent, "old_title": old_title}
                    video_list.append(all_data)
                    AliyunLogger.logging(channel_id, name, url_id, video_id, "符合规则等待改造", "2004", log_data)
                    if len(video_list) == int(number):
                        return video_list
                # next_cursor was coerced to str above, so an exhausted feed shows up
                # as "False" or an empty string rather than the boolean False.
                if not next_cursor or next_cursor == "False":
                    return video_list
            except Exception:
                return video_list
        return video_list

    @classmethod
    def get_video(cls, video_id):
        url = "http://8.217.192.46:8889/crawler/dou_yin/detail"
        for i in range(3):
            payload = json.dumps({
                "content_id": str(video_id)
            })
            headers = {
                'Content-Type': 'application/json'
            }
            response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
            response = response.json()
            code = response["code"]
            if code == 10000:
                # Transient failure from the crawler service: wait, then retry (3 attempts total).
                time.sleep(60)
                continue
            data = response["data"]["data"]
            video_url = data["video_url_list"][0]["video_url"]
            image_url = data["image_url_list"][0]["image_url"]
            return video_url, image_url
        return None, None


if __name__ == '__main__':
    # DYX.get_dy_list(1, 2, 1, 3)
    DYX.get_dy_list("1", "MS4wLjABAAAA2QEvnEb7cQDAg6vZXq3j8_LlbO_DiturnV7VeybFKY4", 1, "1", '', "")
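    # A minimal sketch, assuming the call above returns at least one qualifying item,
    # of chaining the two entry points: resolve a listed video id to a playable URL
    # and cover image via get_video.
    # videos = DYX.get_dy_list("1", "MS4wLjABAAAA2QEvnEb7cQDAg6vZXq3j8_LlbO_DiturnV7VeybFKY4", 1, "1", '', "")
    # if videos:
    #     video_url, image_url = DYX.get_video(videos[0]["video_id"])
    #     print(video_url, image_url)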