@@ -134,7 +134,7 @@ class ThreadRunner:
             tool_call_messages += (
                 self.__convert_assistant_tool_calls_to_chat_messages(step)
             )
-        tool_call_messages = tool_call_messages[:2]
+        tool_call_messages = tool_call_messages[:1]
         # memory
         messages = (
             assistant_system_message
@@ -144,6 +144,7 @@ class ThreadRunner:
 
         logging.info("messages: run %s", run)
         logging.info(messages)
+        logging.info(tools)
         response_stream = llm.run(
             messages=messages,
             model=run.model,
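
For context, here is a minimal, self-contained sketch of the mechanics this diff touches: converted tool-call messages are accumulated in a loop, the resulting list is truncated ([:1] instead of [:2], keeping one converted message rather than two), and the tool definitions are logged before the LLM call. The `steps`/`tools` structures and the `build_tool_call_messages` helper are illustrative assumptions, not the project's actual types.

```python
# Illustrative sketch only; data shapes are assumptions, not ThreadRunner's real types.
import logging

logging.basicConfig(level=logging.INFO)


def build_tool_call_messages(steps):
    """Convert completed tool-call steps into chat messages (illustrative)."""
    tool_call_messages = []
    for step in steps:
        # Each step expands into an assistant tool-call message plus its tool result.
        tool_call_messages += [
            {"role": "assistant", "tool_calls": step["calls"]},
            {"role": "tool", "content": step["output"]},
        ]
    # The diff tightens this truncation from [:2] to [:1],
    # so only the first converted message is kept.
    return tool_call_messages[:1]


steps = [
    {"calls": [{"name": "get_weather", "arguments": "{}"}], "output": "sunny"},
]
tools = [{"type": "function", "function": {"name": "get_weather"}}]

messages = build_tool_call_messages(steps)
logging.info(messages)
logging.info(tools)  # mirrors the added logging.info(tools) before llm.run(...)
```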