|
|
@@ -0,0 +1,70 @@
|
|
|
+import time
|
|
|
+import datetime
|
|
|
+from app.core.database import DatabaseManager
|
|
|
+from app.core.config import GlobalConfigSettings
|
|
|
+
|
|
|
+from app.infra.internal import auto_create_crawler_task
|
|
|
+
|
|
|
+import asyncio
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
async def main():
    """Cold-start dispatch pipeline.

    For each configured category: fetch the top 100 articles (by
    ``read_median_multiplier``) that have not yet entered the cold-start
    flow, create a crawler task for their links, record the resulting
    crawler plan, and mark the articles as dispatched.

    Fixes vs. the previous version:
      * an early ``return`` inside the loop stopped processing after the
        first category and made ``close_pools()`` unreachable, leaking
        the connection pools;
      * ``int(time.time()) * 1000`` truncated to whole seconds before
        scaling, losing millisecond precision.
    """
    config = GlobalConfigSettings()

    mysql_manager = DatabaseManager(config)
    await mysql_manager.init_pools()
    try:
        category_list = ['军事']
        for category in category_list:
            # Top 100 not-yet-started articles for this category
            # (cold_start_status = 0 means "never dispatched").
            fetch_query = """
                select t1.id, article_link, read_cnt, read_median_multiplier
                from ad_platform_accounts_daily_detail t1
                join ad_platform_accounts t2 on t1.gh_id = t2.gh_id
                where cold_start_status = 0
                and t2.category = %s
                order by read_median_multiplier desc limit 100;
            """
            articles = await mysql_manager.async_fetch(fetch_query, params=(category,))
            if not articles:
                # Nothing to cold-start for this category; avoid creating
                # an empty crawler plan or an `IN ()` update clause.
                continue

            url_list = [item["article_link"] for item in articles]
            crawler_plan_response = await auto_create_crawler_task(
                plan_id=None,
                plan_name=f"冷启动--{category}-{datetime.date.today()}-{len(articles)}",
                plan_tag="互选平台优质账号",
                platform="weixin",
                url_list=url_list,
            )
            print(crawler_plan_response)

            # Millisecond epoch timestamp: scale BEFORE truncating so the
            # millisecond component is preserved.
            create_timestamp = int(time.time() * 1000)
            crawler_plan_id = crawler_plan_response["data"]["id"]
            crawler_plan_name = crawler_plan_response["data"]["name"]
            insert_query = """
                insert into article_crawler_plan (crawler_plan_id, name, create_timestamp)
                values (%s, %s, %s)
            """
            await mysql_manager.async_save(
                query=insert_query,
                params=(crawler_plan_id, crawler_plan_name, create_timestamp),
            )

            # Mark dispatched rows (status 2) so they are not picked again;
            # the `cold_start_status = 0` guard keeps the update idempotent
            # against concurrent runs.
            id_list = [item["id"] for item in articles]
            update_query = """
                update ad_platform_accounts_daily_detail
                set cold_start_status = %s
                where id in %s and cold_start_status = %s;
            """
            affect_rows = await mysql_manager.async_save(
                query=update_query,
                params=(2, tuple(id_list), 0),
            )
            print(affect_rows)
    finally:
        # Always release the connection pools, even if a category fails.
        await mysql_manager.close_pools()
|
|
|
+
|
|
|
+
|
|
|
# Script entry point: drive the async pipeline to completion on a fresh
# event loop (asyncio.run creates and closes the loop itself).
if __name__ == "__main__":
    asyncio.run(main())
|