@@ -1,6 +1,6 @@
reload = true
bind = "0.0.0.0:6060"
-workers = 6
+workers = 8
keep_alive_timeout = 120 # 保持连接的最大秒数,根据需要调整
graceful_timeout = 30 # 重启或停止之前等待当前工作完成的时间
loglevel = "warning" # 日志级别
@@ -331,4 +331,7 @@ class CrawlerGzhSearchArticles(CrawlerGzhBaseStrategy):
hot_titles = await self.get_hot_titles_with_strategy(strategy)
for hot_title in hot_titles:
print("hot title:", hot_title)
- await self.search_each_title(hot_title)
+ try:
+ await self.search_each_title(hot_title)
+ except Exception as e:
+ print(f"crawler_gzh_articles error:{e}")
@@ -8,3 +8,7 @@ prompt = "你好"
res = fetch_deepseek_completion(model="defa", prompt=prompt)
print(res)
+"""
+curl -X POST http://127.0.0.1:6060/api/run_task -H "Content-Type: application/json" -d '{"task_name": "crawler_gzh_articles", "account_method": "search", "crawl_mode": "search", "strategy": "V1"}'
+
+"""