@@ -1,6 +1,7 @@
 import json
 from typing import List, Optional
 
+import pqai_agent.utils
 from pqai_agent.agent import DEFAULT_MAX_RUN_STEPS
 from pqai_agent.chat_service import OpenAICompatible
 from pqai_agent.logging import logger
@@ -35,25 +36,26 @@ class SimpleOpenAICompatibleChatAgent:
             self.tool_map[tool.name] = tool
 
     def run(self, user_input: str) -> str:
+        run_id = pqai_agent.utils.random_str()[:12]
         messages = [{"role": "system", "content": self.system_prompt}]
         tools = [tool.get_openai_tool_schema() for tool in self.tools]
         messages.append({"role": "user", "content": user_input})
 
         n_steps = 0
-        logger.debug(f"start agent loop. messages: {messages}")
+        logger.debug(f"run_id[{run_id}] start agent loop. messages: {messages}")
         while n_steps < self.max_run_step:
             response = self.llm_client.chat.completions.create(model=self.model, messages=messages, tools=tools, **self.generate_cfg)
             message = response.choices[0].message
             self.total_input_tokens += response.usage.prompt_tokens
             self.total_output_tokens += response.usage.completion_tokens
             messages.append(message)
-            logger.debug(f"current step content: {message.content}")
+            logger.debug(f"run_id[{run_id}] current step content: {message.content}")
 
             if message.tool_calls:
                 for tool_call in message.tool_calls:
                     function_name = tool_call.function.name
                     arguments = json.loads(tool_call.function.arguments)
-                    logger.debug(f"call function[{function_name}], parameter: {arguments}")
+                    logger.debug(f"run_id[{run_id}] call function[{function_name}], parameter: {arguments}")
 
                     if function_name in self.tool_map:
                         result = self.tool_map[function_name](**arguments)
@@ -68,7 +70,7 @@ class SimpleOpenAICompatibleChatAgent:
                             "result": result
                         })
                     else:
-                        logger.error(f"Function {function_name} not found in tool map.")
+                        logger.error(f"run_id[{run_id}] Function {function_name} not found in tool map.")
                         raise Exception(f"Function {function_name} not found in tool map.")
             else:
                 return message.content