dou_yin_keywords_search.py
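"""
Offline analysis script: replays Douyin keyword-search log entries under
different account / sort-type configurations, applies the configured filter
rules to each search result, and appends the per-video outcomes to a
CSV-style text file for manual review.
"""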

import json
from typing import List, Any, Optional

from simpleeval import simple_eval

from client.CrawlerClient import CrawlerClient
from model.automation_provide_job import DouYinSearchConfig, ChannelSearchAndDetailDTO, SearchFilterConfigItem
from util.automation_provide_util import AutoProvideUtil

crawler_client = CrawlerClient()

# A keyword passes pre-filtering only if more than this many first-page
# search results satisfy the filter rule.
preFilterThreshold = 3

result_txt_file = '/Users/zhao/Desktop/tzld/文档/分析文档/关键词分析.txt'


def write_result_file(content, mode='a+'):
    with open(result_txt_file, mode) as f:
        f.write(content)
        f.write("\n")


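# Writes the CSV header row, truncating any existing result file ('w' mode).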
def log_info_print_title():
    write_result_file(
        "视频ID,品类,关键词,爬取计划,结果,原因,搜索使用的账号ID,排序方式,站外视频ID,站外账号ID,过滤结果,分享量,点赞量,分享量/点赞量,视频时长(秒),观众年龄50+占比,观众年龄50+TGI,过滤规则表达式", 'w')


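# Appends one CSV row per out-of-platform video recorded in log_json['ext'];
# if there is no per-video ext info, a single summary row is written instead.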
def log_info_print(log_json: dict[str, Any], account_id: Optional[int] = None):
    if 'ext' in log_json and isinstance(log_json['ext'], dict):
        log_json['ext'] = json.dumps(log_json['ext'], ensure_ascii=False)
    if 'modelValueConfig' in log_json and isinstance(log_json['modelValueConfig'], dict):
        log_json['modelValueConfig'] = json.dumps(log_json['modelValueConfig'], ensure_ascii=False)
    video_id = log_json["videoId"]
    keywords = log_json['keywords']
    crawler_plan_id = log_json.get("crawlerPlanId", "")
    result = log_json.get("result", False)
    reason = log_json.get("reason", "")
    merge_cate2 = log_json['mergeSecondLevelCate']
    sort_type = json.loads(log_json.get("modelValueConfig", "{}")).get("sortType")
    ext_json = json.loads(log_json.get("ext", "{}"))
    account_id = account_id if account_id else 0
    if not ext_json:
        write_result_file(f"{video_id},{merge_cate2},{keywords},'{crawler_plan_id},'{result},{reason},{account_id},{sort_type}")
        return
    for channel_content_id in ext_json:
        channel_ext_info = ext_json[channel_content_id]
        filter_result = channel_ext_info.get("result", False)
        rule_str = channel_ext_info.get("rule", "")
        rule_context = channel_ext_info.get('ruleContext', {})
        share_cnt = rule_context.get('shareCnt', 0)
        video_duration_s = rule_context.get('videoDuration_s', 0)
        like_cnt = rule_context.get('likeCnt', 0)
        audience_age_50_rate = rule_context.get('audienceAge50Rate', 0)
        audience_age_50_tgi = rule_context.get('audienceAge50TGI', 0)
        share_div_link = rule_context.get('shareDivLink', 0)
        channel_account_id = ""
        if "contentDetail" in channel_ext_info:
            channel_account_id = channel_ext_info["contentDetail"].get("channelAccountId")
        elif "fanPortrait" in channel_ext_info:
            channel_account_id = channel_ext_info["fanPortrait"].get("channelAccountId")
        write_result_file(f"{video_id},{merge_cate2},{keywords},'{crawler_plan_id},'{result},{reason},{account_id},{sort_type},'{channel_content_id},{channel_account_id},{filter_result},"
                          f"{share_cnt},{like_cnt},{share_div_link},{video_duration_s},{audience_age_50_rate},{audience_age_50_tgi},{rule_str}")


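# Searches Douyin for the given keyword and sort type via the crawler client,
# optionally under a logged-in account (account_id).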
def keywords_search(keywords: str, sort_type: str, account_id=None) -> List[ChannelSearchAndDetailDTO]:
    search_config = DouYinSearchConfig(
        search_content=keywords,
        sort_type=sort_type,
        account_id=account_id
    )
    return crawler_client.dou_yin_keywords_search(search_config, True, True)


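# The filter rule string may use '&&'/'||' operators; rewrite them to Python
# 'and'/'or' and evaluate the expression safely with simpleeval.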
def eval_expr(expr: str, context: dict) -> bool:
    expr = expr.replace("&&", " and ").replace("||", " or ")
    return bool(simple_eval(expr, names=context))


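# Searches the keyword, evaluates the filter rule against every first-page
# result, and records per-video details in result_json['ext']. The keyword is
# marked as failed unless more than preFilterThreshold videos pass the rule
# (videos whose rule features cannot be extracted are counted as passing).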
def keywords_search_and_filter(keywords: str, sort_type: str, account_id: int, log_json: dict[str, Any], filters: List[SearchFilterConfigItem]) -> dict[str, Any]:
    need_copy_keys = ["videoId", "accountFilters", "contentFilters", "mergeSecondLevelCate", "keywords"]
    result_json = {}
    for key in need_copy_keys:
        result_json[key] = log_json.get(key)
    log_ext_info = {}
    result_json['ext'] = log_ext_info
    result_json['result'] = True
    result_json['modelValueConfig'] = {"sortType": sort_type}
    rule_str = AutoProvideUtil.parse_filter_config_to_rule_str(filters)
    channel_search_and_detail_dtos = keywords_search(keywords, sort_type, account_id)
    if not channel_search_and_detail_dtos:
        result_json["result"] = False
        result_json['reason'] = '关键词搜索结果为空'
        return result_json
    cnt = 0
    for channel_search_and_detail_dto in channel_search_and_detail_dtos:
        channel_content_id = channel_search_and_detail_dto.channel_content_id
        channel_account_id = channel_search_and_detail_dto.channel_account_id
        content_detail = channel_search_and_detail_dto.content_detail
        fans_portrait = channel_search_and_detail_dto.fans_portrait
        ext_json = {}
        log_ext_info[channel_content_id] = ext_json
        if content_detail:
            content_detail['channelAccountId'] = channel_account_id
            content_detail['channelContentId'] = channel_content_id
            ext_json['contentDetail'] = content_detail
        if fans_portrait:
            fans_portrait['channelAccountId'] = channel_account_id
            fans_portrait['channelContentId'] = channel_content_id
            ext_json['fanPortrait'] = fans_portrait
        if (not content_detail) and (not fans_portrait):
            ext_json["result"] = False
            continue
        rule_context = AutoProvideUtil.extract_content_rule_feature(content_detail=content_detail, fans_portrait=fans_portrait)
        ext_json['ruleContext'] = rule_context
        ext_json['rule'] = rule_str
        if not rule_context:
            # No rule features could be extracted; count the video as passing.
            cnt += 1
            continue
        result = eval_expr(expr=rule_str, context=rule_context)
        ext_json['result'] = result
        if result:
            cnt += 1
    if cnt <= preFilterThreshold:
        result_json["result"] = False
        result_json['reason'] = '该关键词首页满足条件的视频数不足'
    return result_json


def keywords_not_login_comprehensive_sort(keywords: str, log_json: dict[str, Any], filters: List[SearchFilterConfigItem]):
    """Not logged in, comprehensive (default) sort."""
    account_id = 0
    log_json = keywords_search_and_filter(keywords=keywords, sort_type="综合排序", account_id=account_id, log_json=log_json, filters=filters)
    log_info_print(log_json, account_id=account_id)


def keywords_login_comprehensive_sort(keywords: str, log_json: dict[str, Any], filters: List[SearchFilterConfigItem]):
    """Logged in, comprehensive (default) sort."""
    account_id = 771431186
    log_json = keywords_search_and_filter(keywords=keywords, sort_type="综合排序", account_id=account_id, log_json=log_json, filters=filters)
    log_info_print(log_json, account_id=account_id)


def keywords_login_like_sort(keywords: str, log_json: dict[str, Any], filters: List[SearchFilterConfigItem]):
    """Logged in, sorted by most likes."""
    account_id = 771431186
    log_json = keywords_search_and_filter(keywords=keywords, sort_type="最多点赞", account_id=account_id, log_json=log_json, filters=filters)
    log_info_print(log_json, account_id=account_id)


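# Replays a single production log entry: parses its account/content filter
# configs, then re-runs the keyword search under three configurations
# (not logged in + default sort, logged in + default sort, logged in + most
# likes) and appends the outcome of each to the result file.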
def handle_log_json(log_json: dict[str, Any]):
    log_info_print(log_json)
    # Not logged in, most likes
    keywords = log_json['keywords']
    account_filters = json.loads(log_json.get("accountFilters", "[]"))
    content_filters = json.loads(log_json.get("contentFilters", '[]'))
    search_filter_config_items = []
    for filter_item in account_filters + content_filters:
        search_filter_config_items.append(SearchFilterConfigItem(**filter_item))
    keywords_not_login_comprehensive_sort(keywords, log_json, search_filter_config_items)
    keywords_login_comprehensive_sort(keywords, log_json, search_filter_config_items)
    keywords_login_like_sort(keywords, log_json, search_filter_config_items)


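# Reads the sample file (one JSON log entry per line), writes the CSV header,
# then processes each entry.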
def main():
    file_path = "/Users/zhao/Downloads/keywords_filter_test_sample.json"
    log_list = []
    with open(file_path, "r", encoding="utf-8") as f:
        line = f.readline()
        while line:
            log_list.append(json.loads(line))
            line = f.readline()
    log_info_print_title()
    for log in log_list:
        handle_log_json(log)


if __name__ == '__main__':
    main()