I am looking for a node other than llm_node from which I can grab any tool calls or agent ChatMessages produced by the LLM. Right now I have to do some preprocessing ChatChunk-by-ChatChunk:
async def llm_node(self, chat_ctx: ChatContext, tools, model_settings: ModelSettings):
    """Override of the default llm_node that inspects the LLM stream before re-yielding.

    Accumulates the assistant's text reply and any tool calls emitted by the
    LLM while passing every ChatChunk downstream unchanged, so the rest of
    the pipeline behaves exactly as with the default node.

    Args:
        chat_ctx: The chat context handed to the LLM for this turn.
        tools: Tools made available to the LLM (forwarded untouched).
        model_settings: Model configuration (forwarded untouched).

    Yields:
        ChatChunk objects exactly as produced by the default llm_node.
    """
    # NOTE(review): depending on the livekit-agents version this may need to be
    # chat_ctx.items instead of chat_ctx.messages() — confirm against the SDK.
    logger.debug("llm_node hit with %d items in chat_ctx", len(chat_ctx.messages()))

    # Collect text pieces in a list and join once at the end — avoids the
    # quadratic growth of repeated string +=.
    text_parts: list[str] = []
    tool_calls: list[FunctionToolCall] = []

    async for chunk in Agent.default.llm_node(self, chat_ctx, tools, model_settings):
        # chunk should always be a ChatChunk, NOT a str.
        delta = chunk.delta
        if delta is not None:
            # TODO: handle when delta.tool_calls is populated (tool call was made?)
            if delta.tool_calls:
                tool_calls.extend(delta.tool_calls)
            if delta.content:
                text_parts.append(delta.content)
        # Forward the chunk unmodified so downstream consumers see the raw stream.
        yield chunk

    agent_msg = "".join(text_parts)