This commit is contained in:
Krishna Chaitanya 2026-03-24 19:22:27 -07:00 committed by GitHub
commit fd15f07f4a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 14 additions and 12 deletions

View File

@@ -62,6 +62,9 @@ async def main():
if response.message.tool_calls:
# There may be multiple tool calls in the response
# Add the assistant message with tool calls to the conversation
messages.append(response.message)
for tool in response.message.tool_calls:
# Ensure the function is available, and then call it
if function_to_call := available_functions.get(tool.function.name):
@@ -71,14 +74,12 @@ async def main():
print('Function output:', output)
else:
print('Function', tool.function.name, 'not found')
output = 'Function not found'
# Only needed to chat with the model using the tool call results
if response.message.tool_calls:
# Add the function response to messages for the model to use
messages.append(response.message)
messages.append({'role': 'tool', 'content': str(output), 'tool_name': tool.function.name})
# Add each tool result as a separate message
messages.append({'role': 'tool', 'content': str(output), 'tool_name': tool.function.name})
# Get final response from model with function outputs
# Get final response from model with all tool call results
final_response = await client.chat('llama3.1', messages=messages)
print('Final response:', final_response.message.content)

View File

@@ -60,6 +60,9 @@ response: ChatResponse = chat(
if response.message.tool_calls:
# There may be multiple tool calls in the response
# Add the assistant message with tool calls to the conversation
messages.append(response.message)
for tool in response.message.tool_calls:
# Ensure the function is available, and then call it
if function_to_call := available_functions.get(tool.function.name):
@@ -69,14 +72,12 @@ if response.message.tool_calls:
print('Function output:', output)
else:
print('Function', tool.function.name, 'not found')
output = 'Function not found'
# Only needed to chat with the model using the tool call results
if response.message.tool_calls:
# Add the function response to messages for the model to use
messages.append(response.message)
messages.append({'role': 'tool', 'content': str(output), 'tool_name': tool.function.name})
# Add each tool result as a separate message
messages.append({'role': 'tool', 'content': str(output), 'tool_name': tool.function.name})
# Get final response from model with function outputs
# Get final response from model with all tool call results
final_response = chat('llama3.1', messages=messages)
print('Final response:', final_response.message.content)