"""
OpenRouter Provider

Calls models (including Claude Sonnet 4.5) through the OpenRouter API.
Supports the OpenAI-compatible API format.
"""
  6. import os
  7. import json
  8. import httpx
  9. from typing import List, Dict, Any, Optional
  10. async def openrouter_llm_call(
  11. messages: List[Dict[str, Any]],
  12. model: str = "anthropic/claude-sonnet-4.5",
  13. tools: Optional[List[Dict]] = None,
  14. **kwargs
  15. ) -> Dict[str, Any]:
  16. """
  17. OpenRouter LLM 调用函数
  18. Args:
  19. messages: OpenAI 格式消息列表
  20. model: 模型名称(如 "anthropic/claude-sonnet-4.5")
  21. tools: OpenAI 格式工具定义
  22. **kwargs: 其他参数(temperature, max_tokens 等)
  23. Returns:
  24. {
  25. "content": str,
  26. "tool_calls": List[Dict] | None,
  27. "prompt_tokens": int,
  28. "completion_tokens": int,
  29. "finish_reason": str,
  30. "cost": float
  31. }
  32. """
  33. api_key = os.getenv("OPEN_ROUTER_API_KEY")
  34. if not api_key:
  35. raise ValueError("OPEN_ROUTER_API_KEY environment variable not set")
  36. base_url = "https://openrouter.ai/api/v1"
  37. endpoint = f"{base_url}/chat/completions"
  38. # 构建请求
  39. payload = {
  40. "model": model,
  41. "messages": messages,
  42. }
  43. # 添加可选参数
  44. if tools:
  45. payload["tools"] = tools
  46. if "temperature" in kwargs:
  47. payload["temperature"] = kwargs["temperature"]
  48. if "max_tokens" in kwargs:
  49. payload["max_tokens"] = kwargs["max_tokens"]
  50. # OpenRouter 特定参数
  51. headers = {
  52. "Authorization": f"Bearer {api_key}",
  53. "HTTP-Referer": "https://github.com/your-repo", # 可选,用于统计
  54. "X-Title": "Agent Framework", # 可选,显示在 OpenRouter dashboard
  55. }
  56. # 调用 API
  57. async with httpx.AsyncClient(timeout=120.0) as client:
  58. try:
  59. response = await client.post(endpoint, json=payload, headers=headers)
  60. response.raise_for_status()
  61. result = response.json()
  62. except httpx.HTTPStatusError as e:
  63. error_body = e.response.text
  64. print(f"[OpenRouter] Error {e.response.status_code}: {error_body}")
  65. raise
  66. except Exception as e:
  67. print(f"[OpenRouter] Request failed: {e}")
  68. raise
  69. # 解析响应(OpenAI 格式)
  70. choice = result["choices"][0] if result.get("choices") else {}
  71. message = choice.get("message", {})
  72. content = message.get("content", "")
  73. tool_calls = message.get("tool_calls")
  74. finish_reason = choice.get("finish_reason") # stop, length, tool_calls, content_filter 等
  75. # 提取 usage
  76. usage = result.get("usage", {})
  77. prompt_tokens = usage.get("prompt_tokens", 0)
  78. completion_tokens = usage.get("completion_tokens", 0)
  79. # 计算成本(OpenRouter 通常在响应中提供,但这里简化为 0)
  80. cost = 0.0
  81. return {
  82. "content": content,
  83. "tool_calls": tool_calls,
  84. "prompt_tokens": prompt_tokens,
  85. "completion_tokens": completion_tokens,
  86. "finish_reason": finish_reason,
  87. "cost": cost
  88. }
  89. def create_openrouter_llm_call(
  90. model: str = "anthropic/claude-sonnet-4.5"
  91. ):
  92. """
  93. 创建 OpenRouter LLM 调用函数
  94. Args:
  95. model: 模型名称
  96. - "anthropic/claude-sonnet-4.5"
  97. - "anthropic/claude-opus-4.5"
  98. - "openai/gpt-4o"
  99. 等等
  100. Returns:
  101. 异步 LLM 调用函数
  102. """
  103. async def llm_call(
  104. messages: List[Dict[str, Any]],
  105. model: str = model,
  106. tools: Optional[List[Dict]] = None,
  107. **kwargs
  108. ) -> Dict[str, Any]:
  109. return await openrouter_llm_call(messages, model, tools, **kwargs)
  110. return llm_call