import time
import datetime

from tqdm import tqdm

from applications.api import feishu_robot
from applications.crawler.wechat import get_article_detail, get_article_list_from_account


class MonitorConst:
    # article illegal-content status
    ILLEGAL_STATUS = 1
    INIT_STATUS = 0

    # monitoring window in seconds (articles from the last 5 days are tracked)
    MONITOR_CYCLE = 5 * 24 * 3600

    # article response codes
    ARTICLE_ILLEGAL_CODE = 25012
    ARTICLE_DELETE_CODE = 25005
    ARTICLE_SUCCESS_CODE = 0
    ARTICLE_UNKNOWN_CODE = 10000

    # task status codes
    TASK_SUCCESS_CODE = 2
    TASK_FAIL_CODE = 99


class OutsideGzhArticlesManager(MonitorConst):

    def __init__(self, pool):
        self.pool = pool

    async def update_article_illegal_status(
        self, article_id: int, illegal_reason: str
    ) -> None:
        query = """
            update outside_gzh_account_monitor
            set illegal_status = %s, illegal_reason = %s
            where id = %s and illegal_status = %s
        """
        await self.pool.async_save(
            query=query,
            params=(self.ILLEGAL_STATUS, illegal_reason, article_id, self.INIT_STATUS),
        )

    async def whether_published_in_a_week(self, gh_id: str) -> bool:
        """
        Check whether the account has published within the monitoring window
        (MONITOR_CYCLE, currently 5 days); if it has, it does not need to be
        crawled again.
        """
        query = """
            select id, publish_timestamp from outside_gzh_account_monitor
            where gh_id = %s
            order by publish_timestamp desc
            limit %s;
        """
        response = await self.pool.async_fetch(query=query, params=(gh_id, 1))
        if response:
            publish_timestamp = response[0]["publish_timestamp"]
            if publish_timestamp is None:
                return False
            return int(time.time()) - publish_timestamp <= self.MONITOR_CYCLE
        return False


class OutsideGzhArticlesCollector(OutsideGzhArticlesManager):

    async def fetch_outside_account_list(self):
        query = """
            select
                t2.group_source_name as account_source,
                t3.name as account_name,
                t3.gh_id as gh_id,
                t3.status as status
            from wx_statistics_group_source t1
            join wx_statistics_group_source_account t2 on t2.group_source_name = t1.account_source_name
            join publish_account t3 on t3.id = t2.account_id
            where t1.mode_type = '代运营服务号';
        """
        return await self.pool.async_fetch(query=query, db_name="aigc_db_pool")

    async def fetch_each_account(self, account: dict):
        gh_id = account["gh_id"]
        # skip the account if it has already published within the monitoring window
        if await self.whether_published_in_a_week(gh_id):
            return

        fetch_response = get_article_list_from_account(gh_id)
        try:
            msg_list = fetch_response.get("data", {}).get("data", [])
            if msg_list:
                for msg in tqdm(
                    msg_list, desc=f"insert account {account['account_name']}"
                ):
                    await self.save_each_msg_to_db(msg, account)
            else:
                print(f"crawler failed: {account['account_name']}")
        except Exception as e:
            print(
                f"crawler failed: account_name: {account['account_name']}\n"
                f"error: {e}\n"
            )

    async def save_each_msg_to_db(self, msg: dict, account: dict):
        base_info = msg["AppMsg"]["BaseInfo"]
        detail_info = msg["AppMsg"]["DetailInfo"]
        app_msg_id = base_info["AppMsgId"]
        create_timestamp = base_info["CreateTime"]
        publish_type = base_info["Type"]

        # insert each article in the message
        for article in detail_info:
            link = article["ContentUrl"]
            article_detail = get_article_detail(link)
            response_code = article_detail["code"]
            if response_code == self.ARTICLE_ILLEGAL_CODE:
                illegal_reason = article_detail.get("msg")
                # send a Feishu alert, then move on to the next article
                feishu_robot.bot(
                    title="文章违规告警",
                    detail={
                        "账号名称": account["account_name"],
                        "标题": article["Title"],
                        "违规理由": illegal_reason,
                        "发布日期": datetime.datetime.fromtimestamp(
                            create_timestamp
                        ).strftime("%Y-%m-%d %H:%M:%S"),
                        "账号合作商": account["account_source"],
                    },
                    env="outside_gzh_monitor",
                    mention=False,
                )
            elif response_code == self.ARTICLE_SUCCESS_CODE:
                insert_query = """
                    insert ignore into outside_gzh_account_monitor
                        (account_name, gh_id, account_source, account_type, app_msg_id,
                         publish_type, position, title, link, channel_content_id,
                         crawler_timestamp, publish_timestamp)
                    values
                        (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
                """
                await self.pool.async_save(
                    query=insert_query,
                    params=(
                        account["account_name"],
                        account["gh_id"],
                        account["account_source"],
                        "服务号",
                        app_msg_id,
                        publish_type,
                        article["ItemIndex"],
                        article["Title"],
                        link,
                        article_detail["data"]["data"]["channel_content_id"],
                        int(time.time()),
                        int(article_detail["data"]["data"]["publish_timestamp"] / 1000),
                    ),
                )
            else:
                continue

    async def deal(self):
        account_list = await self.fetch_outside_account_list()
        for account in tqdm(account_list):
            try:
                await self.fetch_each_account(account)
            except Exception as e:
                print(f"crawler failed: {account['account_name']}, error: {e}")


class OutsideGzhArticlesMonitor(OutsideGzhArticlesManager):

    async def fetch_article_list_to_check(self):
        publish_timestamp_threshold = int(time.time()) - self.MONITOR_CYCLE
        fetch_query = """
            select id, account_name, gh_id, account_source, account_type,
                   title, link, from_unixtime(publish_timestamp) as publish_date
            from outside_gzh_account_monitor
            where illegal_status = %s and publish_timestamp > %s;
        """
        return await self.pool.async_fetch(
            query=fetch_query,
            params=(self.INIT_STATUS, publish_timestamp_threshold),
        )

    async def check_each_article(self, article: dict):
        """
        Re-check a single article and flag it if it has been marked illegal.
        """
        link = article["link"]
        article_detail = get_article_detail(link)
        response_code = article_detail["code"]
        if response_code == self.ARTICLE_ILLEGAL_CODE:
            illegal_reason = article_detail.get("msg")
            feishu_robot.bot(
                title="文章违规告警",
                detail={
                    "账号名称": article["account_name"],
                    "标题": article["title"],
                    "违规理由": illegal_reason,
                    "发布日期": str(article["publish_date"]),
                    "账号合作商": article["account_source"],
                },
                env="outside_gzh_monitor",
                mention=False,
            )
            article_id = article["id"]
            await self.update_article_illegal_status(article_id, illegal_reason)

    async def deal(self):
        article_list = await self.fetch_article_list_to_check()
        for article in tqdm(article_list):
            try:
                await self.check_each_article(article)
            except Exception as e:
                print(
                    f"crawler failed: account_name: {article['account_name']}\n"
                    f"link: {article['link']}\n"
                    f"title: {article['title']}\n"
                    f"error: {e}\n"
                )
        return self.TASK_SUCCESS_CODE
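

# Usage sketch (an illustration, not part of the original module): how the
# collector and monitor could be wired into a single async entry point.
# `create_async_pool` is a hypothetical factory standing in for whatever
# connection pool the surrounding project provides; the only assumption is
# that it returns an object exposing the async_fetch / async_save methods
# used above.
#
# import asyncio
#
# async def run_outside_gzh_tasks():
#     pool = await create_async_pool()  # hypothetical pool factory
#     # 1. crawl newly published articles from the managed service accounts
#     await OutsideGzhArticlesCollector(pool).deal()
#     # 2. re-check recently published articles for illegal content
#     task_code = await OutsideGzhArticlesMonitor(pool).deal()
#     assert task_code == MonitorConst.TASK_SUCCESS_CODE
#
# if __name__ == "__main__":
#     asyncio.run(run_outside_gzh_tasks())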