```python
# scheduler_main.py - Crawler scheduler entry point
import asyncio
import traceback
import sys
import os

from application.config.common import AliyunLogger
from application.spiders.universal_crawler import AsyncCrawler


async def main():
    """Main entry point."""
    # Set up logging
    logger = AliyunLogger(platform="system", mode="manager")
    try:
        # Read configuration from environment variables
        config_topic = os.getenv("CONFIG_TOPIC", "crawler_config")
        config_group = os.getenv("CONFIG_GROUP", "crawler_config_group")
        # Platform and mode for the crawler controller
        # (the env variable names here are illustrative defaults)
        platform = os.getenv("CRAWLER_PLATFORM", "system")
        mode = os.getenv("CRAWLER_MODE", "manager")

        # Create the crawler controller
        controller = AsyncCrawler(
            platform=platform,
            mode=mode,
        )
        # Start the controller
        await controller.run()

        # Keep the main coroutine alive
        while True:
            await asyncio.sleep(60)
    except Exception as e:
        tb = traceback.format_exc()
        message = f"Main program error: {e}\n{tb}"
        logger.logging(code="1006", message=message)
        sys.exit(1)


if __name__ == "__main__":
    # Run the main event loop
    asyncio.run(main())
```
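
The entry point only relies on `AsyncCrawler` accepting `platform` and `mode` keyword arguments and exposing an async `run()` coroutine. If `application.spiders.universal_crawler` is not available locally, a minimal stand-in built on that assumed interface (a sketch, not the project's actual implementation) is enough to exercise the scheduler:

```python
# Minimal stand-in for application.spiders.universal_crawler.AsyncCrawler.
# It assumes only the interface used by scheduler_main.py: a constructor
# taking platform/mode keywords and an async run() that drives the crawl loop.
import asyncio


class AsyncCrawler:
    def __init__(self, platform: str, mode: str):
        self.platform = platform
        self.mode = mode

    async def run(self) -> None:
        # Placeholder crawl loop; a real controller would consume the crawler
        # configuration (e.g. from the CONFIG_TOPIC queue) and dispatch
        # platform-specific spiders.
        while True:
            print(f"[{self.platform}/{self.mode}] polling for crawl tasks...")
            await asyncio.sleep(10)


if __name__ == "__main__":
    asyncio.run(AsyncCrawler(platform="system", mode="manager").run())
```

With such a stub in place, the scheduler can be started directly (for example `CONFIG_TOPIC=crawler_config python scheduler_main.py`) and stopped with Ctrl+C.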