ks_keyword.py

import json
import random
import time

import requests

from utils.aliyun_log import AliyunLogger
from utils.feishu_form import Material
from utils.sql_help import sqlCollect


class KsKeyword:
    """Kuaishou keyword search: scan candidate videos via the crawler service and filter them by share/view rules."""

    @classmethod
    def get_key_word(cls, task, fs_channel_name):
        # combo carries the search filters: sort order, publish-time window, duration bucket
        combo = task['combo']
        sort_type = combo[0]
        publish_time = combo[1]
        duration = combo[2]
        share_count_rule = 0
        special = 0
        short_duration_rule = 0
        url = "http://8.217.192.46:8889/crawler/kuai_shou/keyword"
        video_list = []
        if not task["channel_url"] or not task["channel_url"].strip():
            return video_list
        payload = json.dumps({
            "keyword": task["channel_url"],
            "content_type": "视频",
            "sort_type": sort_type,
            "publish_time": publish_time,
            "duration": duration,
            "cursor": ""
        })
        headers = {
            'Content-Type': 'application/json'
        }
        # Thresholds depend on the publish-time window:
        #   share_count_rule    - minimum share count
        #   special             - minimum share/view ratio
        #   short_duration_rule - minimum duration in seconds
        if "不限" == publish_time:
            share_count_rule = 100
            special = 0.0005
            short_duration_rule = 25
        elif "近1日" == publish_time:
            share_count_rule = 0
            special = 0.0003
            short_duration_rule = 25
        elif "近7日" == publish_time:
            share_count_rule = 50
            special = 0.0005
            short_duration_rule = 25
        elif "近1月" == publish_time:
            share_count_rule = 100
            special = 0.0005
            short_duration_rule = 25
        try:
            time.sleep(3)
            response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
            response = response.json()
            code = response['code']
            if code != 0:
                return video_list
            data_list = response['data']['data']
            for data in data_list:
                data = data['feed']
                photo_id = data['photo_id']
                # Deduplicate: either against the last N days or against full history
                day_count = Material.get_count_restrict(task["channel"])
                if day_count:
                    status = sqlCollect.is_used_days(photo_id, task['channel'], day_count)
                else:
                    status = sqlCollect.is_used(photo_id, task['channel'])
                image_url = data['cover_thumbnail_urls'][0]['url']
                video_url = data['main_mv_urls'][0]['url']
                if ".mp4" not in video_url:
                    continue
                view_count = data.get('view_count', 0)
                share_count = data.get('share_count', 0)
                old_title = data['caption']  # original title
                # Guard against zero views so a ZeroDivisionError doesn't abort the whole scan
                if int(view_count) == 0:
                    continue
                video_percent = '%.4f' % (int(share_count) / int(view_count))
                duration = int(int(data["duration"]) / 1000)  # milliseconds -> seconds
                log_data = f"user:{task['channel_url']},,video_id:{photo_id},,video_url:'',original_title:{old_title},,share_count:{share_count},,view_count:{view_count},,duration:{duration}"
                AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], photo_id, "扫描到一条视频", "2001", log_data)
                if status:
                    # Already processed before
                    AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], photo_id, "该视频已改造过", "2002", log_data)
                    continue
                if float(video_percent) < special:
                    # Share/view ratio below threshold
                    AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], photo_id, f"不符合规则:分享/浏览{special}", "2003", log_data)
                    continue
                if int(share_count) < share_count_rule:
                    # Share count below threshold
                    AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], photo_id, f"不符合规则:分享小于{share_count_rule}", "2003", log_data)
                    continue
                if int(duration) < short_duration_rule or int(duration) > 720:
                    # Duration outside the allowed range
                    AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], photo_id,
                                         f"不符合规则:时长不符合规则大于720秒/小于{short_duration_rule}", "2003",
                                         log_data)
                    continue
                log_data = f"user:{task['channel_url']},,video_id:{photo_id},,video_url:{video_url},,original_title:{old_title},,share_count:{share_count},,view_count:{view_count},,duration:{duration}"
                all_data = {"video_id": photo_id, "cover": image_url, "video_url": video_url,
                            "rule": video_percent,
                            "old_title": old_title}
                video_list.append(all_data)
                AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], photo_id, "符合规则等待改造", "2004", log_data)
            return video_list
        except Exception:
            return video_list

    @classmethod
    def get_video(cls, video_id):
        url = "http://8.217.192.46:8889/crawler/kuai_shou/detail"
        payload = json.dumps({
            "content_id": str(video_id)
        })
        headers = {
            'Content-Type': 'application/json'
        }
        # Random delay to avoid hammering the crawler endpoint
        time.sleep(random.uniform(1, 10))
        response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
        response = response.json()
        data = response["data"]["data"]
        video_url = data["video_url_list"][0]["video_url"]
        image_url = data["image_url_list"][0]["image_url"]
        return video_url, image_url
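

# Usage sketch (illustrative, not part of the original module): builds a task dict with the
# fields get_key_word actually reads -- 'combo', 'channel_url', 'channel' -- and prints the
# candidate videos it returns. The concrete combo values, keyword, and channel name below are
# assumptions for demonstration only; real values come from the task configuration upstream.
if __name__ == "__main__":
    demo_task = {
        "combo": ["综合排序", "近7日", "1-10分钟"],  # sort_type, publish_time, duration (assumed values)
        "channel_url": "户外钓鱼",                   # search keyword (assumed)
        "channel": "快手搜索",                       # channel name used for dedup and logging (assumed)
    }
    candidates = KsKeyword.get_key_word(demo_task, fs_channel_name="demo")
    for item in candidates:
        # get_video re-resolves a fresh playable URL and cover image for a given photo_id
        play_url, cover_url = KsKeyword.get_video(item["video_id"])
        print(item["video_id"], play_url, cover_url)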