import json
import random
import time

import requests

from utils.aliyun_log import AliyunLogger
from utils.feishu_form import Material
from utils.sql_help import sqlCollect


class KsKeyword:
    @classmethod
    def get_key_word(cls, task, fs_channel_name):
        """Search Kuaishou by keyword and return videos that pass the filtering rules."""
        combo = task['combo']
        sort_type = combo[0]
        publish_time = combo[1]
        duration = combo[2]
        share_count_rule = 0
        special = 0
        short_duration_rule = 0
        url = "http://8.217.192.46:8889/crawler/kuai_shou/keyword"
        video_list = []
        if not task["channel_url"] or not task["channel_url"].strip():
            return video_list
        payload = json.dumps({
            "keyword": task["channel_url"],
            "content_type": "视频",
            "sort_type": sort_type,
            "publish_time": publish_time,
            "duration": duration,
            "cursor": ""
        })
        headers = {
            'Content-Type': 'application/json'
        }
        # Filtering thresholds (minimum share count, share/view ratio, minimum duration)
        # depend on the publish-time window selected for the search.
        if "不限" == publish_time:
            share_count_rule = 100
            special = 0.0005
            short_duration_rule = 25
        elif "近1日" == publish_time:
            share_count_rule = 0
            special = 0.0003
            short_duration_rule = 25
        elif "近7日" == publish_time:
            share_count_rule = 50
            special = 0.0005
            short_duration_rule = 25
        elif "近1月" == publish_time:
            share_count_rule = 100
            special = 0.0005
            short_duration_rule = 25
        try:
            time.sleep(3)
            response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
            response = response.json()
            code = response['code']
            if code != 0:
                return video_list
            data_list = response['data']['data']
            for data in data_list:
                data = data['feed']
                photo_id = data['photo_id']
                # Skip videos that have already been processed (optionally within the last N days).
                day_count = Material.get_count_restrict(task["channel"])
                if day_count:
                    status = sqlCollect.is_used_days(photo_id, task['channel'], day_count)
                else:
                    status = sqlCollect.is_used(photo_id, task['channel'])
                image_url = data['cover_thumbnail_urls'][0]['url']
                video_url = data['main_mv_urls'][0]['url']
                if ".mp4" not in video_url:
                    continue
                view_count = data.get('view_count', 0)
                share_count = data.get('share_count', 0)
                old_title = data['caption']  # original title
                # Guard against zero views so one bad record does not abort the whole scan.
                if int(view_count) == 0:
                    continue
                video_percent = '%.4f' % (int(share_count) / int(view_count))
                duration = int(int(data["duration"]) / 1000)
                log_data = f"user:{task['channel_url']},,video_id:{photo_id},,video_url:'',original_title:{old_title},,share_count:{share_count},,view_count:{view_count},,duration:{duration}"
                AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], photo_id, "扫描到一条视频", "2001", log_data)
                if status:
                    AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], photo_id, "该视频已改造过", "2002", log_data)
                    continue
                if float(video_percent) < special:
                    AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], photo_id, f"不符合规则:分享/浏览{special}", "2003", log_data)
                    continue
                if int(share_count) < share_count_rule:
                    AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], photo_id, f"不符合规则:分享小于{share_count_rule}", "2003", log_data)
                    continue
                if int(duration) < short_duration_rule or int(duration) > 720:
                    AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], photo_id, f"不符合规则:时长不符合规则大于720秒/小于{short_duration_rule}", "2003", log_data)
                    continue
                log_data = f"user:{task['channel_url']},,video_id:{photo_id},,video_url:{video_url},,original_title:{old_title},,share_count:{share_count},,view_count:{view_count},,duration:{duration}"
                all_data = {"video_id": photo_id, "cover": image_url, "video_url": video_url, "rule": video_percent, "old_title": old_title}
                video_list.append(all_data)
                AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], photo_id, "符合规则等待改造", "2004", log_data)
            return video_list
        except Exception:
            return video_list

    @classmethod
    def get_video(cls, video_id):
        """Fetch the playable video URL and cover image URL for a single video."""
        url = "http://8.217.192.46:8889/crawler/kuai_shou/detail"
        payload = json.dumps({
            "content_id": str(video_id)
        })
        headers = {
            'Content-Type': 'application/json'
        }
        time.sleep(random.uniform(1, 10))
        response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
        response = response.json()
        data = response["data"]["data"]
        video_url = data["video_url_list"][0]["video_url"]
        image_url = data["image_url_list"][0]["image_url"]
        return video_url, image_url