import json
import random
import time

import requests

from common import Feishu, AliyunLogger, Material
from common.sql_help import sqlCollect


class DYX:
    @classmethod
    def get_dy_list(cls, task_mark, url_id, number, mark, channel_id, name):
        """Fetch a blogger's latest videos and return the ones that pass the filter rules."""
        url = "http://8.217.192.46:8889/crawler/dou_yin/blogger"
        video_list = []
        next_cursor = ''
        if not url_id or not url_id.strip():
            return video_list
        for _ in range(5):
            try:
                payload = json.dumps({
                    "account_id": url_id,
                    "source": "app",
                    "sort_type": "最新",
                    "cursor": next_cursor
                })
                headers = {
                    'Content-Type': 'application/json'
                }
                response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
                time.sleep(random.randint(1, 5))
                response = response.json()
                code = response['code']
                if code != 0:
                    return video_list
                data_list = response['data']
                next_cursor = data_list['next_cursor']
                data = data_list['data']
                for item in data:
                    video_id = item.get('aweme_id')  # post id
                    day_count = Material.get_count_restrict(channel_id)
                    if day_count:
                        status = sqlCollect.is_used_days(video_id, mark, channel_id, day_count)
                    else:
                        status = sqlCollect.is_used(video_id, mark, channel_id)

                    video_url = item.get('video', {}).get('play_addr', {}).get('url_list', [None])[0]  # video URL
                    digg_count = int(item.get('statistics').get('digg_count'))  # like count
                    share_count = int(item.get('statistics').get('share_count'))  # share count
                    duration = item.get('duration')
                    duration = duration / 1000  # milliseconds -> seconds
                    old_title = item.get('desc', "").strip().replace("\n", "") \
                        .replace("/", "").replace("\\", "").replace("\r", "") \
                        .replace(":", "").replace("*", "").replace("?", "") \
                        .replace("?", "").replace('"', "").replace("<", "") \
                        .replace(">", "").replace("|", "").replace(" ", "") \
                        .replace("&NBSP", "").replace(".", "。").replace(" ", "") \
                        .replace("'", "").replace("#", "").replace("Merge", "")
                    log_data = f"user:{url_id},,video_id:{video_id},,video_url:{video_url},,original_title:{old_title},,share_count:{share_count},,digg_count:{digg_count},,duration:{duration}"
                    AliyunLogger.logging(channel_id, name, url_id, video_id, "扫描到一条视频", "2001", log_data)
                    if status:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "该视频已改造过", "2002", log_data)
                        continue
                    if digg_count == 0:
                        # skip when the like count is 0 to avoid a zero division in the share/like ratio
                        continue
                    video_percent = '%.2f' % (share_count / digg_count)
                    special = 0.15
                    if share_count < 200:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享小于200", "2003", log_data)
                        continue
                    if float(video_percent) < special:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:分享/点赞小于0.15", "2003", log_data)
                        continue
                    if int(duration) < 30 or int(duration) > 720:
                        AliyunLogger.logging(channel_id, name, url_id, video_id, "不符合规则:时长不符合规则大于720秒/小于30秒", "2003", log_data)
                        continue
                    cover_url = item.get('video').get('cover').get('url_list')[0]  # cover image URL
                    all_data = {"video_id": video_id, "cover": cover_url, "video_url": video_url,
                                "rule": video_percent, "old_title": old_title}
                    video_list.append(all_data)
                    AliyunLogger.logging(channel_id, name, url_id, video_id, "符合规则等待改造", "2004", log_data)
                    if len(video_list) == int(number):
                        return video_list
                if not next_cursor:
                    # no next page returned, stop paging
                    return video_list
                next_cursor = str(next_cursor)
            except Exception:
                # on any request/parse error, return what has been collected so far
                return video_list
        return video_list

    @classmethod
    def get_video(cls, video_id):
        """Fetch the playable video URL and cover image URL for a single video id."""
        url = "http://8.217.192.46:8889/crawler/dou_yin/detail"
        for _ in range(3):
            payload = json.dumps({
                "content_id": str(video_id)
            })
            headers = {
                'Content-Type': 'application/json'
            }
            response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
            response = response.json()
            code = response["code"]
            if code == 10000:
                # service returned code 10000: wait and retry
                time.sleep(60)
                continue
            data = response["data"]["data"]
            video_url = data["video_url_list"][0]["video_url"]
            image_url = data["image_url_list"][0]["image_url"]
            return video_url, image_url
        return None, None


if __name__ == '__main__':
    # DYX.get_dy_list(1,2,1,3)
    DYX.get_dy_list("1", "MS4wLjABAAAA2QEvnEb7cQDAg6vZXq3j8_LlbO_DiturnV7VeybFKY4", 1, "1", '', "")
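# Minimal usage sketch (kept as comments so nothing runs on import). Assumptions: the
# crawler service at http://8.217.192.46:8889 is reachable and the account id, mark,
# channel_id and name values below are placeholders. get_dy_list returns the filtered
# candidate videos; get_video then resolves the playable URL and cover for one of them:
#
#     videos = DYX.get_dy_list("task", "<douyin_account_id>", 1, "mark", "channel_id", "name")
#     if videos:
#         video_url, image_url = DYX.get_video(videos[0]["video_id"])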