# openrouter.py
  1. """
  2. OpenRouter Provider
  3. 使用 OpenRouter API 调用各种模型(包括 Claude Sonnet 4.5)
  4. 支持 OpenAI 兼容的 API 格式
  5. OpenRouter 转发多种模型,需要根据实际模型处理不同的 usage 格式:
  6. - OpenAI 模型: prompt_tokens, completion_tokens, completion_tokens_details.reasoning_tokens
  7. - Claude 模型: input_tokens, output_tokens, cache_creation_input_tokens, cache_read_input_tokens
  8. - DeepSeek 模型: prompt_tokens, completion_tokens, reasoning_tokens
  9. """
import json
import logging
import os
from typing import Any, Dict, List, Optional

import httpx

from .pricing import calculate_cost
from .usage import TokenUsage, create_usage_from_response
  16. def _detect_provider_from_model(model: str) -> str:
  17. """根据模型名称检测提供商"""
  18. model_lower = model.lower()
  19. if model_lower.startswith("anthropic/") or "claude" in model_lower:
  20. return "anthropic"
  21. elif model_lower.startswith("openai/") or model_lower.startswith("gpt") or model_lower.startswith("o1") or model_lower.startswith("o3"):
  22. return "openai"
  23. elif model_lower.startswith("deepseek/") or "deepseek" in model_lower:
  24. return "deepseek"
  25. elif model_lower.startswith("google/") or "gemini" in model_lower:
  26. return "gemini"
  27. else:
  28. return "openai" # 默认使用 OpenAI 格式
  29. def _parse_openrouter_usage(usage: Dict[str, Any], model: str) -> TokenUsage:
  30. """
  31. 解析 OpenRouter 返回的 usage
  32. OpenRouter 会根据底层模型返回不同格式的 usage
  33. """
  34. provider = _detect_provider_from_model(model)
  35. # OpenRouter 通常返回 OpenAI 格式,但可能包含额外字段
  36. if provider == "anthropic":
  37. # Claude 模型可能有缓存字段
  38. return TokenUsage(
  39. input_tokens=usage.get("prompt_tokens") or usage.get("input_tokens", 0),
  40. output_tokens=usage.get("completion_tokens") or usage.get("output_tokens", 0),
  41. cache_creation_tokens=usage.get("cache_creation_input_tokens", 0),
  42. cache_read_tokens=usage.get("cache_read_input_tokens", 0),
  43. )
  44. elif provider == "deepseek":
  45. # DeepSeek 可能有 reasoning_tokens
  46. return TokenUsage(
  47. input_tokens=usage.get("prompt_tokens", 0),
  48. output_tokens=usage.get("completion_tokens", 0),
  49. reasoning_tokens=usage.get("reasoning_tokens", 0),
  50. )
  51. else:
  52. # OpenAI 格式(包括 o1/o3 的 reasoning_tokens)
  53. reasoning = 0
  54. if details := usage.get("completion_tokens_details"):
  55. reasoning = details.get("reasoning_tokens", 0)
  56. return TokenUsage(
  57. input_tokens=usage.get("prompt_tokens", 0),
  58. output_tokens=usage.get("completion_tokens", 0),
  59. reasoning_tokens=reasoning,
  60. )
  61. async def openrouter_llm_call(
  62. messages: List[Dict[str, Any]],
  63. model: str = "anthropic/claude-sonnet-4.5",
  64. tools: Optional[List[Dict]] = None,
  65. **kwargs
  66. ) -> Dict[str, Any]:
  67. """
  68. OpenRouter LLM 调用函数
  69. Args:
  70. messages: OpenAI 格式消息列表
  71. model: 模型名称(如 "anthropic/claude-sonnet-4.5")
  72. tools: OpenAI 格式工具定义
  73. **kwargs: 其他参数(temperature, max_tokens 等)
  74. Returns:
  75. {
  76. "content": str,
  77. "tool_calls": List[Dict] | None,
  78. "prompt_tokens": int,
  79. "completion_tokens": int,
  80. "finish_reason": str,
  81. "cost": float
  82. }
  83. """
  84. api_key = os.getenv("OPEN_ROUTER_API_KEY")
  85. if not api_key:
  86. raise ValueError("OPEN_ROUTER_API_KEY environment variable not set")
  87. base_url = "https://openrouter.ai/api/v1"
  88. endpoint = f"{base_url}/chat/completions"
  89. # 构建请求
  90. payload = {
  91. "model": model,
  92. "messages": messages,
  93. }
  94. # 添加可选参数
  95. if tools:
  96. payload["tools"] = tools
  97. if "temperature" in kwargs:
  98. payload["temperature"] = kwargs["temperature"]
  99. if "max_tokens" in kwargs:
  100. payload["max_tokens"] = kwargs["max_tokens"]
  101. # OpenRouter 特定参数
  102. headers = {
  103. "Authorization": f"Bearer {api_key}",
  104. "HTTP-Referer": "https://github.com/your-repo", # 可选,用于统计
  105. "X-Title": "Agent Framework", # 可选,显示在 OpenRouter dashboard
  106. }
  107. # 调用 API
  108. async with httpx.AsyncClient(timeout=120.0) as client:
  109. try:
  110. response = await client.post(endpoint, json=payload, headers=headers)
  111. response.raise_for_status()
  112. result = response.json()
  113. except httpx.HTTPStatusError as e:
  114. error_body = e.response.text
  115. print(f"[OpenRouter] Error {e.response.status_code}: {error_body}")
  116. raise
  117. except Exception as e:
  118. print(f"[OpenRouter] Request failed: {e}")
  119. raise
  120. # 解析响应(OpenAI 格式)
  121. choice = result["choices"][0] if result.get("choices") else {}
  122. message = choice.get("message", {})
  123. content = message.get("content", "")
  124. tool_calls = message.get("tool_calls")
  125. finish_reason = choice.get("finish_reason") # stop, length, tool_calls, content_filter 等
  126. # 提取 usage(完整版,根据模型类型解析)
  127. raw_usage = result.get("usage", {})
  128. usage = _parse_openrouter_usage(raw_usage, model)
  129. # 计算费用
  130. cost = calculate_cost(model, usage)
  131. return {
  132. "content": content,
  133. "tool_calls": tool_calls,
  134. "prompt_tokens": usage.input_tokens,
  135. "completion_tokens": usage.output_tokens,
  136. "reasoning_tokens": usage.reasoning_tokens,
  137. "cache_creation_tokens": usage.cache_creation_tokens,
  138. "cache_read_tokens": usage.cache_read_tokens,
  139. "finish_reason": finish_reason,
  140. "cost": cost,
  141. "usage": usage, # 完整的 TokenUsage 对象
  142. }
  143. def create_openrouter_llm_call(
  144. model: str = "anthropic/claude-sonnet-4.5"
  145. ):
  146. """
  147. 创建 OpenRouter LLM 调用函数
  148. Args:
  149. model: 模型名称
  150. - "anthropic/claude-sonnet-4.5"
  151. - "anthropic/claude-opus-4.5"
  152. - "openai/gpt-4o"
  153. 等等
  154. Returns:
  155. 异步 LLM 调用函数
  156. """
  157. async def llm_call(
  158. messages: List[Dict[str, Any]],
  159. model: str = model,
  160. tools: Optional[List[Dict]] = None,
  161. **kwargs
  162. ) -> Dict[str, Any]:
  163. return await openrouter_llm_call(messages, model, tools, **kwargs)
  164. return llm_call