examples/gpt-oss: fix examples (#566)
Some checks are pending
test / test (push) Waiting to run
test / lint (push) Waiting to run

This commit is contained in:
Parth Sareen 2025-08-19 11:08:57 -07:00 committed by GitHub
parent b0f6b99ca6
commit 07ab287cdf
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 54 additions and 13 deletions

View File

@@ -1,7 +1,17 @@
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "gpt-oss",
# "ollama",
# "rich",
# ]
# ///
import random
from typing import Iterator
from ollama import chat
from rich import print
from ollama import Client
from ollama._types import ChatResponse
@@ -40,35 +50,52 @@ available_tools = {'get_weather': get_weather, 'get_weather_conditions': get_wea
messages = [{'role': 'user', 'content': 'What is the weather like in London? What are the conditions in Toronto?'}]
client = Client(
# Ollama Turbo
# host="https://ollama.com", headers={'Authorization': (os.getenv('OLLAMA_API_KEY'))}
)
model = 'gpt-oss:20b'
model = 'gpt-oss:120b'
# gpt-oss can call tools while "thinking"
# a loop is needed to call the tools and get the results
final = True
while True:
response_stream: Iterator[ChatResponse] = chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions], stream=True)
response_stream: Iterator[ChatResponse] = client.chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions], stream=True)
tool_calls = []
thinking = ''
content = ''
for chunk in response_stream:
if chunk.message.tool_calls:
tool_calls.extend(chunk.message.tool_calls)
if chunk.message.content:
if not (chunk.message.thinking or chunk.message.thinking == '') and final:
print('\nFinal result: ')
print('\n\n' + '=' * 10)
print('Final result: ')
final = False
print(chunk.message.content, end='', flush=True)
if chunk.message.thinking:
# accumulate thinking
thinking += chunk.message.thinking
print(chunk.message.thinking, end='', flush=True)
if thinking != '' or content != '':
messages.append({'role': 'assistant', 'thinking': thinking, 'content': content, 'tool_calls': tool_calls})
print()
if chunk.message.tool_calls:
for tool_call in chunk.message.tool_calls:
if tool_calls:
for tool_call in tool_calls:
function_to_call = available_tools.get(tool_call.function.name)
if function_to_call:
print('\nCalling tool: ', tool_call.function.name, 'with arguments: ', tool_call.function.arguments)
print('\nCalling tool:', tool_call.function.name, 'with arguments: ', tool_call.function.arguments)
result = function_to_call(**tool_call.function.arguments)
print('Tool result: ', result + '\n')
messages.append(chunk.message)
messages.append({'role': 'tool', 'content': result, 'tool_name': tool_call.function.name})
result_message = {'role': 'tool', 'content': result, 'tool_name': tool_call.function.name}
messages.append(result_message)
else:
print(f'Tool {tool_call.function.name} not found')

View File

@@ -1,6 +1,16 @@
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "gpt-oss",
# "ollama",
# "rich",
# ]
# ///
import random
from ollama import chat
from rich import print
from ollama import Client
from ollama._types import ChatResponse
@@ -40,11 +50,15 @@ available_tools = {'get_weather': get_weather, 'get_weather_conditions': get_wea
messages = [{'role': 'user', 'content': 'What is the weather like in London? What are the conditions in Toronto?'}]
client = Client(
# Ollama Turbo
# host="https://ollama.com", headers={'Authorization': (os.getenv('OLLAMA_API_KEY'))}
)
model = 'gpt-oss:20b'
# gpt-oss can call tools while "thinking"
# a loop is needed to call the tools and get the results
while True:
response: ChatResponse = chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions])
response: ChatResponse = client.chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions])
if response.message.content:
print('Content: ')
@@ -53,14 +67,14 @@ while True:
print('Thinking: ')
print(response.message.thinking + '\n')
messages.append(response.message)
if response.message.tool_calls:
for tool_call in response.message.tool_calls:
function_to_call = available_tools.get(tool_call.function.name)
if function_to_call:
result = function_to_call(**tool_call.function.arguments)
print('Result from tool call name: ', tool_call.function.name, 'with arguments: ', tool_call.function.arguments, 'result: ', result + '\n')
messages.append(response.message)
messages.append({'role': 'tool', 'content': result, 'tool_name': tool_call.function.name})
else:
print(f'Tool {tool_call.function.name} not found')