This commit is contained in:
ParthSareen 2025-09-17 11:01:58 -07:00
parent 9f41447f20
commit 4390741023
3 changed files with 153 additions and 0 deletions

View File

@ -1,4 +1,8 @@
from ollama._client import AsyncClient, Client
from ollama._browser import (
Browser
)
from ollama._types import (
ChatResponse,
EmbeddingsResponse,
@ -15,6 +19,8 @@ from ollama._types import (
ShowResponse,
StatusResponse,
Tool,
WebSearchResponse,
WebCrawlResponse,
)
__all__ = [
@ -35,6 +41,9 @@ __all__ = [
'ShowResponse',
'StatusResponse',
'Tool',
'WebSearchResponse',
'WebCrawlResponse',
'Browser',
]
_client = Client()
@ -51,3 +60,5 @@ list = _client.list
copy = _client.copy
show = _client.show
ps = _client.ps
websearch = _client.websearch
webcrawl = _client.webcrawl

View File

@ -66,6 +66,10 @@ from ollama._types import (
ShowResponse,
StatusResponse,
Tool,
WebCrawlRequest,
WebCrawlResponse,
WebSearchRequest,
WebSearchResponse,
)
T = TypeVar('T')
@ -102,6 +106,8 @@ class BaseClient:
'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': f'ollama-python/{__version__} ({platform.machine()} {platform.system().lower()}) Python/{platform.python_version()}',
# TODO: this is to make the client feel good
'Authorization': f'Bearer {(headers or {}).get("Authorization") or os.getenv("OLLAMA_API_KEY")}' if (headers or {}).get("Authorization") or os.getenv("OLLAMA_API_KEY") else None,
}.items()
},
**kwargs,
@ -622,6 +628,45 @@ class Client(BaseClient):
'/api/ps',
)
def websearch(self, query: str, max_results: int = 3) -> WebSearchResponse:
  """
  Perform a web search using ollama.com.

  Args:
    query: The query to search for.
    max_results: The maximum number of results to return.

  Returns:
    WebSearchResponse with the search results.
  """
  # The endpoint accepts a batch of queries; this convenience method sends one.
  # exclude_none keeps max_results out of the payload only if it were None.
  return self._request(
    WebSearchResponse,
    'POST',
    'https://ollama.com/api/web_search',
    json=WebSearchRequest(
      queries=[query],
      max_results=max_results,
    ).model_dump(exclude_none=True),
  )
def webcrawl(self, urls: Sequence[str]) -> WebCrawlResponse:
  """
  Gets the content of web pages for the provided URLs.

  Args:
    urls: The URLs to crawl

  Returns:
    WebCrawlResponse with the crawl results
  """
  # Serialize the request model up front; exclude_none drops unset fields.
  payload = WebCrawlRequest(urls=urls).model_dump(exclude_none=True)
  return self._request(
    WebCrawlResponse,
    'POST',
    'https://ollama.com/api/web_crawl',
    json=payload,
  )
class AsyncClient(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:

View File

@ -538,6 +538,103 @@ class ProcessResponse(SubscriptableBaseModel):
models: Sequence[Model]
class WebSearchRequest(SubscriptableBaseModel):
  """Request payload for the ollama.com web search endpoint."""

  # One or more search query strings (Client.websearch sends a single-item list).
  queries: Sequence[str]
  # Optional cap on results; omitted from the JSON payload when None
  # (the client serializes with exclude_none=True).
  max_results: Optional[int] = None
class SearchResult(SubscriptableBaseModel):
  """A single result returned for one search query."""

  # Page title as reported by the search backend.
  title: str
  # URL of the result page.
  url: str
  # Textual content/summary of the result (plain str, not SearchResultContent).
  content: str
  # Optional publication metadata; forward reference since the
  # metadata class is declared later in the module.
  metadata: Optional['SearchResultMetadata'] = None
class CrawlResult(SubscriptableBaseModel):
  """A single crawled page returned for one requested URL."""

  # Page title.
  title: str
  # URL that was crawled.
  url: str
  # Extracted page content (plain str, not CrawlResultContent).
  content: str
  # Outgoing links discovered on the page, when provided.
  links: Optional[Sequence[str]] = None
  # Optional publication metadata; forward reference since the
  # metadata class is declared later in the module.
  metadata: Optional['CrawlResultMetadata'] = None
class SearchResultContent(SubscriptableBaseModel):
  """Structured content of a search result.

  NOTE(review): not referenced by SearchResult in this change — its
  `content` field is a plain str; confirm whether this model is used
  elsewhere or is intended for a future response shape.
  """

  # Short excerpt of the page text.
  snippet: str
  # Complete extracted page text.
  full_text: str
class SearchResultMetadata(SubscriptableBaseModel):
  """Optional publication metadata attached to a search result."""

  # Publication date as a string; format not specified by this code
  # (presumably ISO-8601 — TODO confirm against the API).
  published_date: Optional[str] = None
  # Author name, when the backend can determine one.
  author: Optional[str] = None
class WebSearchResponse(SubscriptableBaseModel):
  """Response from the web search endpoint: results keyed by query string."""

  results: Mapping[str, Sequence[SearchResult]]
  success: bool
  errors: Optional[Sequence[str]] = None

  def __str__(self) -> str:
    """Render results as readable text, or an error summary on failure."""
    if not self.success:
      reason = ', '.join(self.errors) if self.errors else 'Unknown error'
      return f'Web search failed: {reason}'

    lines = []
    for query, hits in self.results.items():
      lines.append(f'Search results for "{query}":')
      for rank, hit in enumerate(hits, start=1):
        lines.append(f'{rank}. {hit.title}')
        lines.append(f' URL: {hit.url}')
        lines.append(f' Content: {hit.content}')
        meta = hit.metadata
        if meta and meta.published_date:
          lines.append(f' Published: {meta.published_date}')
        if meta and meta.author:
          lines.append(f' Author: {meta.author}')
      lines.append('')
    return '\n'.join(lines).rstrip()
class WebCrawlRequest(SubscriptableBaseModel):
  """Request payload for the ollama.com web crawl endpoint."""

  # URLs whose page content should be fetched.
  urls: Sequence[str]
class CrawlResultContent(SubscriptableBaseModel):
  """Structured content of a crawled page.

  NOTE(review): not referenced by CrawlResult in this change — its
  `content` field is a plain str; confirm whether this model is used
  elsewhere or is intended for a future response shape.
  """

  # provides the first 200 characters of the full text
  snippet: str
  # Complete extracted page text.
  full_text: str
class CrawlResultMetadata(SubscriptableBaseModel):
  """Optional publication metadata attached to a crawl result."""

  # Publication date as a string; format not specified by this code
  # (presumably ISO-8601 — TODO confirm against the API).
  published_date: Optional[str] = None
  # Author name, when the backend can determine one.
  author: Optional[str] = None
class WebCrawlResponse(SubscriptableBaseModel):
  """Response from the web crawl endpoint: results keyed by requested URL."""

  results: Mapping[str, Sequence[CrawlResult]]
  success: bool
  errors: Optional[Sequence[str]] = None

  def __str__(self) -> str:
    """Render crawl results as readable text, or an error summary on failure."""
    if not self.success:
      reason = ', '.join(self.errors) if self.errors else 'Unknown error'
      return f'Web crawl failed: {reason}'

    lines = []
    for url, pages in self.results.items():
      lines.append(f'Crawl results for "{url}":')
      for rank, page in enumerate(pages, start=1):
        lines.append(f'{rank}. {page.title}')
        lines.append(f' URL: {page.url}')
        lines.append(f' Content: {page.content}')
        if page.links:
          lines.append(f' Links: {", ".join(page.links)}')
        meta = page.metadata
        if meta and meta.published_date:
          lines.append(f' Published: {meta.published_date}')
        if meta and meta.author:
          lines.append(f' Author: {meta.author}')
      lines.append('')
    return '\n'.join(lines).rstrip()
class RequestError(Exception):
"""
Common class for request errors.