fix: chat_with_tool_results不重复添加tool结果，修正消息格式

This commit is contained in:
2026-04-15 01:03:10 +08:00
parent 10f67a807a
commit 291de733a4
2 changed files with 5 additions and 14 deletions

View File

@@ -1121,7 +1121,6 @@ async def websocket_endpoint(websocket: WebSocket, user_id: str):
messages=history_with_tools,
provider_config=agent_config['provider'],
agent_config=agent_config['agent'],
tool_results=tool_results,
enable_thinking=enable_thinking
)

View File

@@ -514,17 +514,15 @@ class LLMService:
messages: List[Dict],
provider_config: dict,
agent_config: dict,
tool_results: List[Dict],
enable_thinking: bool = True
) -> Tuple[str, Optional[str]]:
"""
第二阶段调用:将工具执行结果返回给LLM
第二阶段调用:使用包含工具调用和结果的完整消息历史
Args:
messages: 对话历史(包含工具调用和结果)
messages: 已包含assistant tool_calls和tool结果的完整消息历史
provider_config: LLM Provider配置
agent_config: Agent配置
tool_results: 工具执行结果 [{"tool_call_id": "xxx", "content": "..."}]
Returns:
Tuple[str, Optional[str]]: (回复内容, 思考过程)
@@ -535,14 +533,8 @@ class LLMService:
max_tokens = provider_config.get('max_tokens', 4096)
temperature = agent_config.get('temperature_override') or provider_config.get('temperature', 0.7)
# 将工具结果添加到消息历史
# 消息历史已经包含了assistant的tool_calls和tool结果，直接使用
final_messages = messages.copy()
for result in tool_results:
final_messages.append({
"role": "tool",
"tool_call_id": result['tool_call_id'],
"content": result['content']
})
# 调用LLM生成最终回复
url = f"{api_base.rstrip('/')}/chat/completions"
@@ -557,14 +549,14 @@ class LLMService:
"max_tokens": max_tokens
}
logger.info(f"工具结果返回LLM: url={url}, model={model}")
logger.info(f"工具结果返回LLM: url={url}, model={model}, 消息数={len(final_messages)}")
try:
async with httpx.AsyncClient(timeout=60.0) as client:
response = await client.post(url, headers=headers, json=payload)
if response.status_code != 200:
logger.error(f"API返回错误: status={response.status_code}")
logger.error(f"API返回错误: status={response.status_code}, body={response.text[:500]}")
response.raise_for_status()
data = response.json()