# dy_keyword.py
  1. import requests
  2. import json
  3. from utils.aliyun_log import AliyunLogger
  4. from utils.feishu_form import Material
  5. from utils.sql_help import sqlCollect
  6. class DyKeyword:
  7. @classmethod
  8. def get_key_word(cls, task, fs_channel_name):
  9. combo = task['combo']
  10. content_type = combo[0]
  11. publish_time = combo[1]
  12. duration = combo[2]
  13. share_count_rule = 0
  14. special = 0
  15. short_duration_rule = 0
  16. url = "http://8.217.192.46:8889/crawler/dou_yin/keyword"
  17. list = []
  18. if not task["channel_url"] or not task["channel_url"].strip():
  19. return list
  20. payload = json.dumps({
  21. "keyword": task["channel_url"],
  22. "content_type": "视频",
  23. "sort_type": content_type,
  24. "publish_time": publish_time,
  25. "duration": duration,
  26. "cursor": ""
  27. })
  28. headers = {
  29. 'Content-Type': 'application/json'
  30. }
  31. if " 不限" == publish_time:
  32. share_count_rule = 200
  33. special = 0.15
  34. short_duration_rule = 30
  35. elif "一天内" == publish_time:
  36. share_count_rule = 0
  37. special = 0.10
  38. short_duration_rule = 25
  39. elif "一周内" == publish_time:
  40. share_count_rule = 100
  41. special = 0.15
  42. short_duration_rule = 25
  43. elif "半年内" == publish_time:
  44. share_count_rule = 200
  45. special = 0.15
  46. short_duration_rule = 25
  47. try:
  48. response = requests.request("POST", url, headers=headers, data=payload, timeout=30)
  49. response = response.json()
  50. code = response['code']
  51. if code != 0:
  52. return list
  53. data = response['data']['data']
  54. for i in range(len(data)):
  55. video_id = data[i].get('aweme_id') # 文章id
  56. day_count = Material.get_count_restrict(task["channel"])
  57. if day_count:
  58. status = sqlCollect.is_used_days(video_id, task['channel'], day_count)
  59. else:
  60. status = sqlCollect.is_used(video_id, task['channel'])
  61. video_url = data[i].get('video', {}).get('play_addr', {}).get('url_list', [None])[0]
  62. old_title = data[i].get('desc', "").strip().replace("\n", "") \
  63. .replace("/", "").replace("\\", "").replace("\r", "") \
  64. .replace(":", "").replace("*", "").replace("?", "") \
  65. .replace("?", "").replace('"', "").replace("<", "") \
  66. .replace(">", "").replace("|", "").replace(" ", "") \
  67. .replace("&NBSP", "").replace(".", "。").replace(" ", "") \
  68. .replace("'", "").replace("#", "").replace("Merge", "")
  69. digg_count = int(data[i].get('statistics').get('digg_count')) # 点赞
  70. share_count = int(data[i].get('statistics').get('share_count')) # 转发
  71. duration = data[i].get('duration')
  72. duration = duration / 1000
  73. log_data = f"user:{task['channel_url']},,video_id:{video_id},,video_url:{video_url},,original_title:{old_title},,share_count:{share_count},,digg_count:{digg_count},,duration:{duration}"
  74. AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], video_id, "扫描到一条视频", "2001", log_data)
  75. if status:
  76. AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], video_id, "该视频已改造过", "2002", log_data)
  77. continue
  78. video_percent = '%.2f' % (int(share_count) / int(digg_count))
  79. if int(share_count) < share_count_rule:
  80. AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], video_id, f"不符合规则:分享小于{share_count_rule}", "2003", log_data)
  81. continue
  82. if float(video_percent) < special:
  83. AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], video_id, f"不符合规则:分享/点赞小于{special}", "2003", log_data)
  84. continue
  85. if int(duration) < short_duration_rule or int(duration) > 720:
  86. AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], video_id, f"不符合规则:时长不符合规则大于720秒/小于{short_duration_rule}秒", "2003", log_data)
  87. continue
  88. cover_url = data[i].get('video').get('cover').get('url_list')[0] # 视频封面
  89. all_data = {"video_id": video_id, "cover": cover_url, "video_url": video_url, "rule": video_percent,
  90. "old_title": old_title}
  91. list.append(all_data)
  92. AliyunLogger.logging(task['channel'], fs_channel_name, task['channel_url'], video_id, "符合规则等待改造", "2004", log_data)
  93. return list
  94. except Exception as exc:
  95. return list