|
|
@@ -310,15 +310,19 @@ def _to_anthropic_tools(tools: List[Dict]) -> List[Dict]:
|
|
|
def _parse_anthropic_response(result: Dict[str, Any]) -> Dict[str, Any]:
|
|
|
"""Parse an Anthropic Messages API response into the unified format.
|
|
|
|
|
|
- Returns a dict with keys: content, tool_calls, finish_reason, usage.
|
|
|
+ Returns a dict with keys: content, tool_calls, finish_reason, usage, thinking.
|
|
|
"""
|
|
|
content_blocks = result.get("content", [])
|
|
|
|
|
|
text_parts = []
|
|
|
tool_calls = []
|
|
|
+ thinking_parts = []
|
|
|
+
|
|
|
for block in content_blocks:
|
|
|
if block.get("type") == "text":
|
|
|
text_parts.append(block.get("text", ""))
|
|
|
+ elif block.get("type") == "thinking":
|
|
|
+ thinking_parts.append(block.get("thinking", ""))
|
|
|
elif block.get("type") == "tool_use":
|
|
|
tool_calls.append({
|
|
|
"id": block.get("id", ""),
|
|
|
@@ -330,6 +334,7 @@ def _parse_anthropic_response(result: Dict[str, Any]) -> Dict[str, Any]:
|
|
|
})
|
|
|
|
|
|
content = "\n".join(text_parts)
|
|
|
+ thinking = "\n".join(thinking_parts) if thinking_parts else None
|
|
|
|
|
|
stop_reason = result.get("stop_reason", "end_turn")
|
|
|
finish_reason_map = {
|
|
|
@@ -353,6 +358,7 @@ def _parse_anthropic_response(result: Dict[str, Any]) -> Dict[str, Any]:
|
|
|
"tool_calls": tool_calls if tool_calls else None,
|
|
|
"finish_reason": finish_reason,
|
|
|
"usage": usage,
|
|
|
+ "thinking": thinking,
|
|
|
}
|
|
|
|
|
|
|
|
|
@@ -511,6 +517,15 @@ async def _openrouter_anthropic_call(
|
|
|
if "temperature" in kwargs:
|
|
|
payload["temperature"] = kwargs["temperature"]
|
|
|
|
|
|
+    # Optional: enable extended thinking mode
|
|
|
+ if kwargs.get("enable_thinking", False):
|
|
|
+ thinking_config = {
|
|
|
+ "type": "enabled",
|
|
|
+ "budget_tokens": kwargs.get("thinking_budget_tokens", 10000)
|
|
|
+ }
|
|
|
+ payload["thinking"] = thinking_config
|
|
|
+ logger.info(f"[OpenRouter/Anthropic] Extended thinking enabled (budget: {thinking_config['budget_tokens']} tokens)")
|
|
|
+
|
|
|
# Debug: 检查 cache_control 是否存在
|
|
|
cache_control_count = 0
|
|
|
if isinstance(system_prompt, list):
|
|
|
@@ -592,6 +607,7 @@ async def _openrouter_anthropic_call(
|
|
|
"finish_reason": parsed["finish_reason"],
|
|
|
"cost": cost,
|
|
|
"usage": usage,
|
|
|
+ "thinking": parsed.get("thinking"),
|
|
|
}
|
|
|
|
|
|
|