suisuiniannianyingfuqi_recommend_scheduling.py 9.7 KB

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/4/13
import json
import os
import sys
import time
import uuid
import requests
import urllib3
from requests.adapters import HTTPAdapter

# Make the project root importable before pulling in the common package.
sys.path.append(os.getcwd())
from common.mq import MQ
from common.common import Common
from common.public import download_rule
from common.scheduling_db import MysqlHelper
from common.aliyun_log import AliyunLogger


class SuisuiniannianyingfuqiRecommendScheduling:
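    """Scheduled "recommend" crawler for the 岁岁年年迎福气 (Suisuiniannianyingfuqi) WeChat mini program."""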
    platform = "岁岁年年迎福气"

    @classmethod
    def repeat_video(cls, log_type, crawler, video_id, env):
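        """Return how many crawler_video rows already exist for this out_video_id; non-zero means a duplicate."""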
        # sql = f""" select * from crawler_video where platform="岁岁年年迎福气" and out_video_id="{video_id}"; """
        sql = f""" select * from crawler_video where platform in ("{crawler}","{cls.platform}") and out_video_id="{video_id}"; """
        repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env)
        return len(repeat_video)

    @classmethod
    def get_videoList(cls, log_type, crawler, our_uid, rule_dict, env):
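        """Page through the recommend feed, filter each video against rule_dict, skip duplicates, and publish the rest to the ETL queue."""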
        mq = MQ(topic_name="topic_crawler_etl_" + env)
        page = 1
        while True:
            try:
                # url = 'https://www.jzkksp.com/index/home/get_home_list.html'
                url = 'https://www.angjukk.cn/index/home/get_home_list.html'
                headers = {
                    'content-type': 'application/x-www-form-urlencoded',
                    'Accept-Encoding': 'gzip,compress,br,deflate',
                    'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 13_3_1 like Mac OS X) '
                                  'AppleWebKit/605.1.15 (KHTML, like Gecko) '
                                  'Mobile/15E148 MicroMessenger/8.0.25(0x1800192b) NetType/WIFI Language/zh_CN',
                    'Referer': 'https://servicewechat.com/wxd4c54f60812f6f36/1/page-frame.html',
                }
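                # The User-Agent and servicewechat.com Referer above imitate the WeChat iOS
                # client, presumably so the mini-program backend accepts the request.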
                data = {
                    'token': '851ae159fd33f955bf433e7c47a4a298',
                    'time': '1667905857000',
                    'str_data': 'uT551tU8',
                    'page': str(page),
                    'limit': '10',
                    'appid': 'wxd4c54f60812f6f36',
                    'version': '1.4.1',
                    'openid': 'oDAjy5SCFe7Ml3PNgiow3ncozL1o'
                }
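                # token / time / str_data / openid are fixed values, presumably captured from
                # a real mini-program session; only the page number varies between requests.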
                urllib3.disable_warnings()
                s = requests.session()
                # max_retries=3: retry each request up to 3 times on connection failure
                s.mount('http://', HTTPAdapter(max_retries=3))
                s.mount('https://', HTTPAdapter(max_retries=3))
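                # verify=False below skips TLS certificate verification; disable_warnings()
                # above silences the InsecureRequestWarning this would otherwise emit.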
                response = s.post(url=url, headers=headers, data=data, verify=False, timeout=5)
                page += 1
                if response.status_code != 200:
                    Common.logger(log_type, crawler).warning(f'get_videoList:{response.status_code}, {response.text}\n')
                    Common.logging(log_type, crawler, env, f'get_videoList:{response.status_code}, {response.text}\n')
                    return
                elif 'data' not in response.json():
                    Common.logger(log_type, crawler).warning(f'get_videoList:{response.status_code}, {response.json()}\n')
                    Common.logging(log_type, crawler, env, f'get_videoList:{response.status_code}, {response.text}\n')
                    return
                elif len(response.json()['data']['video_list']['data']) == 0:
                    Common.logger(log_type, crawler).info(f'No more data~ {response.json()}\n')
                    Common.logging(log_type, crawler, env, f'No more data~ {response.json()}\n')
                    return
                else:
                    feeds = response.json()['data']['video_list']['data']
                    for feed in feeds:
                        try:
                            trace_id = crawler + str(uuid.uuid1())
                            AliyunLogger.logging(
                                code="1001",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                data=feed,
                                message="Detected a video"
                            )
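                            # 'createtime' is expected in "%Y-%m-%d" form; a missing or malformed
                            # value makes strptime raise, which the per-video handler below catches.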
                            publish_time_str = feed.get('createtime', '')
                            publish_time_stamp = int(time.mktime(time.strptime(publish_time_str, "%Y-%m-%d")))
                            video_dict = {
                                'video_title': feed.get('title', "").replace("'", "").replace('"', ''),
                                'video_id': str(feed.get('id', '')),
                                'play_cnt': feed.get('browse', 0),
                                'comment_cnt': 0,
                                'like_cnt': 0,
                                'share_cnt': 0,
                                'publish_time_stamp': publish_time_stamp,
                                'publish_time_str': publish_time_str,
                                'user_name': "岁岁年年迎福气",
                                'user_id': "suisuiniannianyingfuqi",
                                'avatar_url': feed.get('thumb', ''),
                                'cover_url': feed.get('thumb', ''),
                                'video_url': feed.get('url', ''),
                                'session': f"suisuiniannianyingfuqi-{int(time.time())}"
                            }
                            for k, v in video_dict.items():
                                Common.logger(log_type, crawler).info(f"{k}:{v}")
                            Common.logging(log_type, crawler, env, f'video_dict:{video_dict}')
                            if video_dict["video_id"] == '' or video_dict["video_title"] == '' \
                                    or video_dict["cover_url"] == '' or video_dict["video_url"] == '':
                                Common.logger(log_type, crawler).info('Invalid video\n')
                                Common.logging(log_type, crawler, env, 'Invalid video\n')
                            elif download_rule(log_type=log_type, crawler=crawler, video_dict=video_dict, rule_dict=rule_dict) is False:
                                Common.logger(log_type, crawler).info("Does not meet the crawl rules\n")
                                Common.logging(log_type, crawler, env, "Does not meet the crawl rules\n")
                            elif cls.repeat_video(log_type, crawler, video_dict["video_id"], env) != 0:
                                Common.logger(log_type, crawler).info('Video already downloaded\n')
                                Common.logging(log_type, crawler, env, 'Video already downloaded\n')
                                AliyunLogger.logging(
                                    code="2002",
                                    platform=crawler,
                                    mode=log_type,
                                    message="Duplicate video",
                                    data=video_dict,
                                    trace_id=trace_id,
                                    env=env
                                )
                            else:
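                                # Map onto the schema the downstream ETL consumer presumably
                                # expects before publishing to the MQ topic.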
                                video_dict["out_user_id"] = video_dict["user_id"]
                                video_dict["platform"] = crawler
                                video_dict["strategy"] = log_type
                                video_dict["out_video_id"] = video_dict["video_id"]
                                video_dict["width"] = 0
                                video_dict["height"] = 0
                                video_dict["crawler_rule"] = json.dumps(rule_dict)
                                video_dict["user_id"] = our_uid
                                video_dict["publish_time"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time_stamp))
                                mq.send_msg(video_dict)
                                AliyunLogger.logging(
                                    code="1002",
                                    platform=crawler,
                                    mode=log_type,
                                    message="Successfully sent to ETL",
                                    data=video_dict,
                                    trace_id=trace_id,
                                    env=env
                                )
                        except Exception as e:
                            Common.logger(log_type, crawler).error(f"Exception while crawling a single video: {e}\n")
                            Common.logging(log_type, crawler, env, f"Exception while crawling a single video: {e}\n")
                            AliyunLogger.logging(
                                code="3000",
                                platform=crawler,
                                mode=log_type,
                                message=f"Exception while crawling a single video: {e}\n",
                                env=env
                            )
            except Exception as e:
                # page may already have been incremented by the time the exception
                # surfaces, so the number logged here can be off by one.
                Common.logger(log_type, crawler).error(f"Exception while crawling page {page}: {e}\n")
                Common.logging(log_type, crawler, env, f"Exception while crawling page {page}: {e}\n")
                AliyunLogger.logging(
                    code="3000",
                    platform=crawler,
                    mode=log_type,
                    message=f"Exception while crawling page {page}: {e}\n",
                    env=env
                )
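

# Manual smoke test: crawl the recommend feed in the dev environment with an
# empty rule_dict (assumed here to mean "no download-rule filtering").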
if __name__ == '__main__':
    SuisuiniannianyingfuqiRecommendScheduling.get_videoList(log_type='recommend',
                                                            crawler='suisuiniannianyingfuqi',
                                                            our_uid=6267140,
                                                            rule_dict={},
                                                            env='dev')