public.py

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/3/27
import os
import sys
import time
import random
sys.path.append(os.getcwd())
from common.common import Common
from common.scheduling_db import MysqlHelper
# from common import Common
# from scheduling_db import MysqlHelper


# Filter-word list
def filter_word(log_type, crawler, source, env):
    """
    Fetch the filter-word list for a source.
    :param log_type: log type
    :param crawler: which crawler, e.g. xiaoniangao
    :param source: which source, e.g. 小年糕
    :param env: environment, e.g. dev / prod
    :return: word_list
    """
    select_sql = f""" select * from crawler_filter_word where source="{source}" """
    words = MysqlHelper.get_values(log_type, crawler, select_sql, env, action='')
    word_list = []
    if len(words) == 0:
        return word_list
    for word in words:
        word_list.append(word['filter_word'])
    return word_list
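
# Usage sketch (hypothetical table contents; assumes, as the loop above does,
# that MysqlHelper.get_values returns row dicts keyed by column name):
#   filter_word('public', 'xiaoniangao', '小年糕', 'dev')
#   -> ['spam', 'ads']  # one entry per crawler_filter_word.filter_word value
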
def get_user_from_mysql(log_type, crawler, source, env, action=''):
    sql = f"select * from crawler_user_v3 where source='{source}' and mode='{log_type}'"
    results = MysqlHelper.get_values(log_type, crawler, sql, env, action=action)
    if results:
        return results
    else:
        Common.logger(log_type, crawler).warning(f"Crawler {crawler}: no crawl user list found")
        return []
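
# Usage sketch (hypothetical column names; assumes MysqlHelper.get_values
# returns one dict per matching crawler_user_v3 row):
#   get_user_from_mysql('hour', 'xiaoniangao', 'xiaoniangao', 'dev')
#   -> [{'uid': '...', 'source': 'xiaoniangao', 'mode': 'hour', ...}, ...]
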
def get_config_from_mysql(log_type, source, env, text, action=''):
    select_sql = f"""select * from crawler_config where source="{source}" """
    contents = MysqlHelper.get_values(log_type, source, select_sql, env, action=action)
    title_list = []
    filter_list = []
    emoji_list = []
    for content in contents:
        config = content['config']
        # config is stored as a Python-literal dict string; eval'd here
        # (ast.literal_eval would be the safer choice for untrusted input)
        config_dict = eval(config)
        for k, v in config_dict.items():
            if k == "title":
                title_list.extend(v.split(","))
            if k == "filter":
                filter_list.extend(v.split(","))
            if k == "emoji":
                emoji_list.extend(v.split(","))
    if text == "title":
        return title_list
    elif text == "filter":
        return filter_list
    elif text == "emoji":
        return emoji_list
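
# Usage sketch (hypothetical row; the config column is assumed to hold a
# Python-literal dict of comma-joined strings, which is what eval() expects):
#   content['config'] == "{'title': 'a,b', 'filter': 'ad,spam', 'emoji': '😀,🎉'}"
#   get_config_from_mysql('hour', 'xiaoniangao', 'dev', 'filter') -> ['ad', 'spam']
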
def random_title(log_type, crawler, env, text):
    random_title_list = get_config_from_mysql(log_type, crawler, env, text)
    return random.choice(random_title_list)
def task_fun(task_str):
    # Normalize the quoting around the embedded rule list so the whole string
    # can be evaluated as a list of (key, value) tuples
    task_str = task_str.replace("'[{", '[{').replace("}}]'", '}}]')
    task_dict = dict(eval(task_str))
    rule = task_dict['rule']
    task_dict['rule'] = dict()
    # Flatten the list of single-key rule dicts into one rule dict
    for item in rule:
        for k, val in item.items():
            task_dict['rule'][k] = val
    rule_dict = task_dict['rule']
    task_dict = {
        "task_dict": task_dict,
        "rule_dict": rule_dict
    }
    return task_dict
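
# A minimal sketch of what task_fun produces (mirrors the commented-out
# task_str in __main__ below; keys abridged):
#   task_fun(task_str) ->
#   {"task_dict": {"task_id": "11", "mode": "hour", ..., "rule": {...}},
#    "rule_dict": {"duration": {"min": 40, "max": 0},
#                  "playCnt": {"min": 4000, "max": 0}, ...}}
# Note that "rule_dict" and task_dict["rule"] reference the same flattened dict.
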
def download_rule(log_type, crawler, video_dict, rule_dict):
    """
    Basic rules for deciding whether to download a video.
    :param log_type: log type
    :param crawler: which crawler
    :param video_dict: video info, as a dict
    :param rule_dict: rule info, as a dict
    :return: True if the rules are satisfied, otherwise False
    """
    rule_playCnt_min = rule_dict.get('playCnt', {}).get('min', 0)
    rule_playCnt_max = rule_dict.get('playCnt', {}).get('max', 100000000)
    if rule_playCnt_max == 0:
        rule_playCnt_max = 100000000

    rule_duration_min = rule_dict.get('duration', {}).get('min', 0)
    rule_duration_max = rule_dict.get('duration', {}).get('max', 100000000)
    if rule_duration_max == 0:
        rule_duration_max = 100000000

    rule_period_min = rule_dict.get('period', {}).get('min', 0)
    # rule_period_max = rule_dict.get('period', {}).get('max', 100000000)
    # if rule_period_max == 0:
    #     rule_period_max = 100000000
    #
    # rule_fans_min = rule_dict.get('fans', {}).get('min', 0)
    # rule_fans_max = rule_dict.get('fans', {}).get('max', 100000000)
    # if rule_fans_max == 0:
    #     rule_fans_max = 100000000
    #
    # rule_videos_min = rule_dict.get('videos', {}).get('min', 0)
    # rule_videos_max = rule_dict.get('videos', {}).get('max', 100000000)
    # if rule_videos_max == 0:
    #     rule_videos_max = 100000000

    rule_like_min = rule_dict.get('like', {}).get('min', 0)
    rule_like_max = rule_dict.get('like', {}).get('max', 100000000)
    if rule_like_max == 0:
        rule_like_max = 100000000

    rule_videoWidth_min = rule_dict.get('videoWidth', {}).get('min', 0)
    rule_videoWidth_max = rule_dict.get('videoWidth', {}).get('max', 100000000)
    if rule_videoWidth_max == 0:
        rule_videoWidth_max = 100000000

    rule_videoHeight_min = rule_dict.get('videoHeight', {}).get('min', 0)
    rule_videoHeight_max = rule_dict.get('videoHeight', {}).get('max', 100000000)
    if rule_videoHeight_max == 0:
        rule_videoHeight_max = 100000000

    rule_shareCnt_min = rule_dict.get('shareCnt', {}).get('min', 0)
    rule_shareCnt_max = rule_dict.get('shareCnt', {}).get('max', 100000000)
    if rule_shareCnt_max == 0:
        rule_shareCnt_max = 100000000

    rule_commentCnt_min = rule_dict.get('commentCnt', {}).get('min', 0)
    rule_commentCnt_max = rule_dict.get('commentCnt', {}).get('max', 100000000)
    if rule_commentCnt_max == 0:
        rule_commentCnt_max = 100000000

    Common.logger(log_type, crawler).info(
        f'rule_duration_max:{rule_duration_max} >= duration:{int(float(video_dict["duration"]))} >= rule_duration_min:{int(rule_duration_min)}')
    Common.logger(log_type, crawler).info(
        f'rule_playCnt_max:{int(rule_playCnt_max)} >= play_cnt:{int(video_dict["play_cnt"])} >= rule_playCnt_min:{int(rule_playCnt_min)}')
    Common.logger(log_type, crawler).info(
        f'now:{int(time.time())} - publish_time_stamp:{int(video_dict["publish_time_stamp"])} <= {3600 * 24 * int(rule_period_min)}')
    Common.logger(log_type, crawler).info(
        f'rule_like_max:{int(rule_like_max)} >= like_cnt:{int(video_dict["like_cnt"])} >= rule_like_min:{int(rule_like_min)}')
    Common.logger(log_type, crawler).info(
        f'rule_commentCnt_max:{int(rule_commentCnt_max)} >= comment_cnt:{int(video_dict["comment_cnt"])} >= rule_commentCnt_min:{int(rule_commentCnt_min)}')
    Common.logger(log_type, crawler).info(
        f'rule_shareCnt_max:{int(rule_shareCnt_max)} >= share_cnt:{int(video_dict["share_cnt"])} >= rule_shareCnt_min:{int(rule_shareCnt_min)}')
    Common.logger(log_type, crawler).info(
        f'rule_videoWidth_max:{int(rule_videoWidth_max)} >= video_width:{int(video_dict["video_width"])} >= rule_videoWidth_min:{int(rule_videoWidth_min)}')
    Common.logger(log_type, crawler).info(
        f'rule_videoHeight_max:{int(rule_videoHeight_max)} >= video_height:{int(video_dict["video_height"])} >= rule_videoHeight_min:{int(rule_videoHeight_min)}')

    if int(rule_duration_max) >= int(float(video_dict["duration"])) >= int(rule_duration_min) \
            and int(rule_playCnt_max) >= int(video_dict['play_cnt']) >= int(rule_playCnt_min) \
            and int(time.time()) - int(video_dict["publish_time_stamp"]) <= 3600 * 24 * int(rule_period_min) \
            and int(rule_like_max) >= int(video_dict['like_cnt']) >= int(rule_like_min) \
            and int(rule_commentCnt_max) >= int(video_dict['comment_cnt']) >= int(rule_commentCnt_min) \
            and int(rule_shareCnt_max) >= int(video_dict['share_cnt']) >= int(rule_shareCnt_min) \
            and int(rule_videoWidth_max) >= int(video_dict['video_width']) >= int(rule_videoWidth_min) \
            and int(rule_videoHeight_max) >= int(video_dict['video_height']) >= int(rule_videoHeight_min):
        return True
    else:
        return False
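
# Usage sketch with hypothetical values (duration/playCnt/period mirror the
# rule string in __main__ below; all other bounds fall back to their defaults):
#   video_dict = {"duration": 60, "play_cnt": 5000,
#                 "publish_time_stamp": int(time.time()) - 3600,
#                 "like_cnt": 10, "comment_cnt": 2, "share_cnt": 1,
#                 "video_width": 720, "video_height": 1280}
#   rule_dict = {"duration": {"min": 40, "max": 0},
#                "playCnt": {"min": 4000, "max": 0},
#                "period": {"min": 10, "max": 0}}
#   download_rule("hour", "xiaoniangao", video_dict, rule_dict)  # -> True
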
if __name__ == "__main__":
    # print(filter_word('public', 'xiaoniangao', '小年糕', 'prod'))
    print(get_config_from_mysql('hour', 'xiaoniangao', 'dev', 'emoji'))
    # task_str = "[('task_id','11')," \
    #            "('task_name','小年糕小时榜')," \
    #            "('source','xiaoniangao')," \
    #            "('start_time','1681834560000')," \
    #            "('interval','1'),('mode','hour')," \
    #            "('rule','[{'duration':{'min':40,'max':0}},{'playCnt':{'min':4000,'max':0}},{'period':{'min':10,'max':0}},{'fans':{'min':0,'max':0}},{'videos':{'min':0,'max':0}},{'like':{'min':0,'max':0}},{'videoWidth':{'min':0,'max':0}},{'videoHeight':{'min':0,'max':0}}]')," \
    #            "('spider_name','')," \
    #            "('machine','')," \
    #            "('status','0')," \
    #            "('create_time','1681889875288')," \
    #            "('update_time','1681889904908')," \
    #            "('operator','王坤')]"
    # print(task_fun(task_str))
    pass