implemented shape update

This commit is contained in:
nicole pardal 2025-09-22 15:16:58 -07:00
parent d0f71bc8b8
commit e69184decf
4 changed files with 53 additions and 85 deletions

View File

@@ -7,58 +7,39 @@
# ///
from typing import Union
from rich import print
-from ollama import WebCrawlResponse, WebSearchResponse, chat, web_crawl, web_search
+from ollama import Client, WebFetchResponse, WebSearchResponse
-def format_tool_results(results: Union[WebSearchResponse, WebCrawlResponse]):
+def format_tool_results(results: Union[WebSearchResponse, WebFetchResponse]):
if isinstance(results, WebSearchResponse):
-if not results.success:
-error_msg = ', '.join(results.errors) if results.errors else 'Unknown error'
-return f'Web search failed: {error_msg}'
output = []
-for query, search_results in results.results.items():
-output.append(f'Search results for "{query}":')
-for i, result in enumerate(search_results, 1):
-output.append(f'{i}. {result.title}')
-output.append(f' URL: {result.url}')
-output.append(f' Content: {result.content}')
-output.append('')
+for i, result in enumerate(results.results, 1):
+output.append(f'{i}. {result.content}')
+output.append('')
return '\n'.join(output).rstrip()
-elif isinstance(results, WebCrawlResponse):
-if not results.success:
-error_msg = ', '.join(results.errors) if results.errors else 'Unknown error'
-return f'Web crawl failed: {error_msg}'
-output = []
-for url, crawl_results in results.results.items():
-output.append(f'Crawl results for "{url}":')
-for i, result in enumerate(crawl_results, 1):
-output.append(f'{i}. {result.title}')
-output.append(f' URL: {result.url}')
-output.append(f' Content: {result.content}')
-if result.links:
-output.append(f' Links: {", ".join(result.links)}')
-output.append('')
+elif isinstance(results, WebFetchResponse):
+output = [
+f'Title: {results.title}',
+f'Content: {results.content}',
+]
+if results.links:
+output.append(f'Links: {", ".join(results.links)}')
+output.append('')
return '\n'.join(output).rstrip()
# Set OLLAMA_API_KEY in the environment variable or use the headers parameter to set the authorization header
# client = Client(headers={'Authorization': 'Bearer <OLLAMA_API_KEY>'})
-available_tools = {'web_search': web_search, 'web_crawl': web_crawl}
+client = Client(headers={'Authorization': (os.getenv('OLLAMA_API_KEY'))})
+available_tools = {'web_search': client.web_search, 'web_fetch': client.web_fetch}
query = "ollama's new engine"
print('Query: ', query)
messages = [{'role': 'user', 'content': query}]
while True:
-response = chat(model='qwen3', messages=messages, tools=[web_search, web_crawl], think=True)
+response = client.chat(model='qwen3', messages=messages, tools=[client.web_search, client.web_fetch], think=True)
if response.message.thinking:
print('Thinking: ')
print(response.message.thinking + '\n\n')
@@ -72,7 +53,7 @@ while True:
for tool_call in response.message.tool_calls:
function_to_call = available_tools.get(tool_call.function.name)
if function_to_call:
-result: WebSearchResponse | WebCrawlResponse = function_to_call(**tool_call.function.arguments)
+result: Union[WebSearchResponse, WebFetchResponse] = function_to_call(**tool_call.function.arguments)
print('Result from tool call name: ', tool_call.function.name, 'with arguments: ', tool_call.function.arguments)
print('Result: ', format_tool_results(result)[:200])
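
As a sanity check on the new WebFetchResponse branch of format_tool_results above, here is a small, hedged sketch of what it produces, assuming it runs inside the same example script; the field values are invented purely for illustration:

    from ollama import WebFetchResponse

    # Hypothetical values, only to exercise the new formatting branch.
    fetched = WebFetchResponse(
        title='Ollama engine',
        content='Details about the new engine...',
        links=['https://ollama.com/blog'],
    )

    print(format_tool_results(fetched))
    # Title: Ollama engine
    # Content: Details about the new engine...
    # Links: https://ollama.com/blog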

View File

@@ -15,7 +15,7 @@ from ollama._types import (
ShowResponse,
StatusResponse,
Tool,
-WebCrawlResponse,
+WebFetchResponse,
WebSearchResponse,
)
@@ -37,7 +37,7 @@ __all__ = [
'ShowResponse',
'StatusResponse',
'Tool',
-'WebCrawlResponse',
+'WebFetchResponse',
'WebSearchResponse',
]
@@ -56,4 +56,4 @@ copy = _client.copy
show = _client.show
ps = _client.ps
web_search = _client.web_search
-web_crawl = _client.web_crawl
+web_fetch = _client.web_fetch
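
Since the module-level binding is renamed along with the client method, top-level usage changes in the same way. A minimal sketch, assuming the default client is already authorized for web search and fetch (e.g. an OLLAMA_API_KEY-based header set up elsewhere):

    import ollama

    # Single query / single URL per call under the new shape.
    search = ollama.web_search('ollama new engine', max_results=3)
    page = ollama.web_fetch('https://ollama.com/blog')

    print(search.results[0].title)
    print(page.title)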

View File

@@ -66,8 +66,8 @@ from ollama._types import (
ShowResponse,
StatusResponse,
Tool,
-WebCrawlRequest,
-WebCrawlResponse,
+WebFetchRequest,
+WebFetchResponse,
WebSearchRequest,
WebSearchResponse,
)
@@ -633,12 +633,12 @@ class Client(BaseClient):
'/api/ps',
)
-def web_search(self, queries: Sequence[str], max_results: int = 3) -> WebSearchResponse:
+def web_search(self, query: str, max_results: int = 3) -> WebSearchResponse:
"""
Performs a web search
Args:
-queries: The queries to search for
+query: The query to search for
max_results: The maximum number of results to return.
Returns:
@@ -652,34 +652,32 @@
return self._request(
WebSearchResponse,
'POST',
-'https://ollama.com/api/web_search',
+'http://localhost:8080/api/web_search',
json=WebSearchRequest(
-queries=queries,
+query=query,
max_results=max_results,
).model_dump(exclude_none=True),
)
-def web_crawl(self, urls: Sequence[str]) -> WebCrawlResponse:
+def web_fetch(self, url: str) -> WebFetchResponse:
"""
-Gets the content of web pages for the provided URLs.
+Fetches the content of a web page for the provided URL.
Args:
-urls: The URLs to crawl
+url: The URL to fetch
Returns:
-WebCrawlResponse with the crawl results
-Raises:
-ValueError: If OLLAMA_API_KEY environment variable is not set
+WebFetchResponse with the fetched result
"""
+if not self._client.headers.get('authorization', '').startswith('Bearer '):
+raise ValueError('Authorization header with Bearer token is required for web fetch')
return self._request(
-WebCrawlResponse,
+WebFetchResponse,
'POST',
-'https://ollama.com/api/web_crawl',
-json=WebCrawlRequest(
-urls=urls,
+'http://localhost:8080/api/web_fetch',
+json=WebFetchRequest(
+url=url,
).model_dump(exclude_none=True),
)
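
Taken together, the synchronous API now accepts one query or one URL per call, and web_fetch refuses to run without a Bearer authorization header. A hedged usage sketch against the new signatures (the endpoints in this commit point at a local server, so results depend on having it running):

    import os

    from ollama import Client

    # The 'Bearer ' prefix matters: web_fetch checks for it before issuing the request.
    client = Client(headers={'Authorization': f"Bearer {os.getenv('OLLAMA_API_KEY')}"})

    search = client.web_search('ollama new engine', max_results=3)
    for item in search.results:
        print(item.title, item.url)

    page = client.web_fetch('https://ollama.com/blog')
    print(page.title)
    print(page.content[:200])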
@@ -752,12 +750,12 @@ class AsyncClient(BaseClient):
return cls(**(await self._request_raw(*args, **kwargs)).json())
-async def websearch(self, queries: Sequence[str], max_results: int = 3) -> WebSearchResponse:
+async def websearch(self, query: str, max_results: int = 3) -> WebSearchResponse:
"""
Performs a web search
Args:
-queries: The queries to search for
+query: The query to search for
max_results: The maximum number of results to return.
Returns:
@@ -766,29 +764,29 @@
return await self._request(
WebSearchResponse,
'POST',
-'https://ollama.com/api/web_search',
+'/api/web_search',
json=WebSearchRequest(
-queries=queries,
+query=query,
max_results=max_results,
).model_dump(exclude_none=True),
)
-async def webcrawl(self, urls: Sequence[str]) -> WebCrawlResponse:
+async def webfetch(self, url: str) -> WebFetchResponse:
"""
-Gets the content of web pages for the provided URLs.
+Fetches the content of a web page for the provided URL.
Args:
-urls: The URLs to crawl
+url: The URL to fetch
Returns:
-WebCrawlResponse with the crawl results
+WebFetchResponse with the fetched result
"""
return await self._request(
-WebCrawlResponse,
+WebFetchResponse,
'POST',
-'https://ollama.com/api/web_crawl',
-json=WebCrawlRequest(
-urls=urls,
+'/api/web_fetch',
+json=WebFetchRequest(
+url=url,
).model_dump(exclude_none=True),
)
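
The async client mirrors the same change, with the methods named websearch and webfetch here and the paths made relative to the client's host. A minimal async sketch under those assumptions:

    import asyncio
    import os

    from ollama import AsyncClient


    async def main() -> None:
        # Assumes the host behind AsyncClient serves /api/web_search and /api/web_fetch.
        client = AsyncClient(headers={'Authorization': f"Bearer {os.getenv('OLLAMA_API_KEY')}"})
        search = await client.websearch('ollama new engine')
        if search.results:
            page = await client.webfetch(search.results[0].url)
            print(page.title)


    asyncio.run(main())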

View File

@@ -542,37 +542,26 @@ class ProcessResponse(SubscriptableBaseModel):
class WebSearchRequest(SubscriptableBaseModel):
-queries: Sequence[str]
+query: str
max_results: Optional[int] = None
class WebSearchResult(SubscriptableBaseModel):
title: str
url: str
content: str
-class WebCrawlResult(SubscriptableBaseModel):
-title: str
+class WebFetchRequest(SubscriptableBaseModel):
url: str
-content: str
-links: Optional[Sequence[str]] = None
class WebSearchResponse(SubscriptableBaseModel):
-results: Mapping[str, Sequence[WebSearchResult]]
-success: bool
-errors: Optional[Sequence[str]] = None
+results: Sequence[WebSearchResult]
-class WebCrawlRequest(SubscriptableBaseModel):
-urls: Sequence[str]
-class WebCrawlResponse(SubscriptableBaseModel):
-results: Mapping[str, Sequence[WebCrawlResult]]
-success: bool
-errors: Optional[Sequence[str]] = None
+class WebFetchResponse(SubscriptableBaseModel):
+title: str
+content: str
+links: Optional[Sequence[str]] = None
class RequestError(Exception):
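
To make the new shapes concrete, a short hedged sketch constructing the updated models directly; the values are illustrative, and the import paths assume the request/result types remain reachable from ollama._types as shown above:

    from ollama import WebFetchResponse, WebSearchResponse
    from ollama._types import WebFetchRequest, WebSearchRequest, WebSearchResult

    # Requests are now singular: one query or one URL per call.
    req = WebSearchRequest(query='ollama new engine', max_results=3)
    fetch_req = WebFetchRequest(url='https://ollama.com/blog')

    # Responses drop the per-query mapping and the success/errors envelope.
    resp = WebSearchResponse(results=[WebSearchResult(title='t', url='https://example.com', content='c')])
    page = WebFetchResponse(title='Example', content='Page body', links=['https://example.com'])

    print(req.model_dump(exclude_none=True))  # {'query': 'ollama new engine', 'max_results': 3}
    print(resp.results[0].title, page.title)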