|
@@ -6,12 +6,19 @@
|
|
|
import time
|
|
|
|
|
|
from tqdm import tqdm
|
|
|
+from pymysql.cursors import DictCursor
|
|
|
+
|
|
|
from applications import WeixinSpider, Functions, llm_sensitivity, log
|
|
|
from coldStartTasks.filter import article_crawler_duplicate_filter
|
|
|
|
|
|
# 常量
|
|
|
ACCOUNT_GOOD_STATUS = 1
|
|
|
+
|
|
|
+# 账号是否每日抓取
|
|
|
ACCOUNT_DAILY_SCRAPE = 1
|
|
|
+ACCOUNT_NOT_DAILY_SCRAPE = 0
|
|
|
+
|
|
|
+# 默认值
|
|
|
DEFAULT_VIEW_COUNT = 0
|
|
|
DEFAULT_LIKE_COUNT = 0
|
|
|
DEFAULT_ARTICLE_STATUS = 1
|
|
@@ -52,6 +59,20 @@ class weixinCategory(object):
|
|
|
]
|
|
|
return result
|
|
|
|
|
|
+ def get_association_account_list(self, date_str):
|
|
|
+ """
|
|
|
+ 获取账号联想的轮询账号
|
|
|
+ """
|
|
|
+ group_id = date_str[-1]
|
|
|
+ sql = f"""
|
|
|
+ select account_id, gh_id, account_name, latest_update_time as latest_timestamp
|
|
|
+ from long_articles_accounts
|
|
|
+ where account_category = 'account_association' and is_using = {ACCOUNT_DAILY_SCRAPE} and daily_scrape = {ACCOUNT_NOT_DAILY_SCRAPE};
|
|
|
+ """
|
|
|
+ account_list = self.db_client_lam.select(sql, cursor_type=DictCursor)
|
|
|
+ today_crawler_account_list = [i for i in account_list if str(i['account_id'])[-1] == group_id]
|
|
|
+ return today_crawler_account_list
|
|
|
+
|
|
|
def insert_data_into_db(self, gh_id, category, article_list):
|
|
|
"""
|
|
|
将数据更新到数据库
|
|
@@ -176,43 +197,57 @@ class weixinCategory(object):
|
|
|
print("No more data")
|
|
|
return []
|
|
|
|
|
|
- def deal(self, category_list):
|
|
|
+ def crawler_each_category(self, account_list, category):
|
|
|
"""
|
|
|
+ 抓取每个品类
|
|
|
+ :return:
|
|
|
+ """
|
|
|
+ success_records = []
|
|
|
+ for account in tqdm(account_list, desc="crawler_each_category"):
|
|
|
+ try:
|
|
|
+ gh_id = account['gh_id']
|
|
|
+ try:
|
|
|
+ timestamp = int(account['latest_timestamp'].timestamp())
|
|
|
+ except Exception as e:
|
|
|
+ timestamp = DEFAULT_TIMESTAMP
|
|
|
+ success_records += self.update_each_account(
|
|
|
+ gh_id=gh_id,
|
|
|
+ category=category,
|
|
|
+ latest_time_stamp=timestamp
|
|
|
+ )
|
|
|
+ print("success")
|
|
|
+ except Exception as e:
|
|
|
+ print("fail because of {}".format(e))
|
|
|
+ success_titles = [x['title'] for x in success_records]
|
|
|
+ if success_titles:
|
|
|
+ try:
|
|
|
+ sensitive_results = llm_sensitivity.check_titles(success_titles)
|
|
|
+ for record, sensitive_result in zip(success_records, sensitive_results):
|
|
|
+ self.update_article_sensitive_status(
|
|
|
+ category=category,
|
|
|
+ unique_index=record['unique_index'],
|
|
|
+ status=sensitive_result['hit_rule']
|
|
|
+ )
|
|
|
+ except Exception as e:
|
|
|
+ print("failed to update sensitive status: {}".format(e))
|
|
|
|
|
|
+ def deal(self, category_list, date_str):
|
|
|
+ """
|
|
|
:param category_list:
|
|
|
+ :param date_str: YYYY-MM-DD
|
|
|
:return:
|
|
|
"""
|
|
|
+ # daily 品类账号抓取
|
|
|
for category in category_list:
|
|
|
- success_records = []
|
|
|
account_list = self.get_account_list(category)
|
|
|
- for account in tqdm(account_list):
|
|
|
- try:
|
|
|
- gh_id = account['gh_id']
|
|
|
- category = account['category']
|
|
|
- try:
|
|
|
- timestamp = int(account['latest_timestamp'].timestamp())
|
|
|
- except Exception as e:
|
|
|
- timestamp = DEFAULT_TIMESTAMP
|
|
|
- success_records += self.update_each_account(
|
|
|
- gh_id=gh_id,
|
|
|
- category=category,
|
|
|
- latest_time_stamp=timestamp
|
|
|
- )
|
|
|
- print("success")
|
|
|
- except Exception as e:
|
|
|
- print("fail because of {}".format(e))
|
|
|
- success_titles = [x['title'] for x in success_records]
|
|
|
- if success_titles:
|
|
|
- try:
|
|
|
- sensitive_results = llm_sensitivity.check_titles(success_titles)
|
|
|
- for record, sensitive_result in zip(success_records, sensitive_results):
|
|
|
- self.update_article_sensitive_status(
|
|
|
- category=category,
|
|
|
- unique_index=record['unique_index'],
|
|
|
- status=sensitive_result['hit_rule']
|
|
|
- )
|
|
|
- except Exception as e:
|
|
|
- print("failed to update sensitive status: {}".format(e))
|
|
|
+ self.crawler_each_category(account_list=account_list, category=category)
|
|
|
+
|
|
|
+ # 账号联想账号轮询抓取
|
|
|
+ association_account_list = self.get_association_account_list(date_str)
|
|
|
+ self.crawler_each_category(account_list=association_account_list, category="association")
|
|
|
+
|
|
|
+ # 抓完之后,执行相似度打分任务
|
|
|
+ return
|
|
|
|
|
|
def deal_accounts(self, account_list):
|
|
|
"""
|
|
@@ -234,6 +269,7 @@ class weixinCategory(object):
|
|
|
try:
|
|
|
latest_timestamp = account[3].timestamp()
|
|
|
except Exception as e:
|
|
|
+ print(e)
|
|
|
latest_timestamp = DEFAULT_TIMESTAMP
|
|
|
self.update_each_account(
|
|
|
gh_id=gh_id,
|