client: improve error messaging on connection failure (#398)

* Improve error messaging on connection failure
This commit is contained in:
Parth Sareen 2025-01-16 13:55:17 -08:00 committed by GitHub
parent 12d6842f32
commit 967fd657f1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 46 additions and 9 deletions

View File

@@ -2,9 +2,9 @@ from ollama import Client
client = Client()
response = client.create(
model='my-assistant',
from_='llama3.2',
system="You are mario from Super Mario Bros.",
stream=False
model='my-assistant',
from_='llama3.2',
system='You are mario from Super Mario Bros.',
stream=False,
)
print(response.status)

View File

@@ -106,17 +106,22 @@ class BaseClient:
)
CONNECTION_ERROR_MESSAGE = 'Failed to connect to Ollama. Please check that Ollama is downloaded, running and accessible. https://ollama.com/download'
class Client(BaseClient):
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
super().__init__(httpx.Client, host, **kwargs)
def _request_raw(self, *args, **kwargs):
r = self._client.request(*args, **kwargs)
try:
r = self._client.request(*args, **kwargs)
r.raise_for_status()
return r
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return r
except httpx.ConnectError:
raise ConnectionError(CONNECTION_ERROR_MESSAGE) from None
@overload
def _request(
@@ -613,12 +618,14 @@ class AsyncClient(BaseClient):
super().__init__(httpx.AsyncClient, host, **kwargs)
async def _request_raw(self, *args, **kwargs):
r = await self._client.request(*args, **kwargs)
try:
r = await self._client.request(*args, **kwargs)
r.raise_for_status()
return r
except httpx.HTTPStatusError as e:
raise ResponseError(e.response.text, e.response.status_code) from None
return r
except httpx.ConnectError:
raise ConnectionError(CONNECTION_ERROR_MESSAGE) from None
@overload
async def _request(

View File

@@ -535,3 +535,6 @@ class ResponseError(Exception):
self.status_code = status_code
'HTTP status code of the response.'
def __str__(self) -> str:
return f'{self.error} (status code: {self.status_code})'

View File

@@ -9,7 +9,7 @@ from pydantic import BaseModel, ValidationError
from pytest_httpserver import HTTPServer, URIPattern
from werkzeug.wrappers import Request, Response
from ollama._client import AsyncClient, Client, _copy_tools
from ollama._client import CONNECTION_ERROR_MESSAGE, AsyncClient, Client, _copy_tools
PNG_BASE64 = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'
PNG_BYTES = base64.b64decode(PNG_BASE64)
@@ -1112,3 +1112,30 @@ def test_tool_validation():
with pytest.raises(ValidationError):
invalid_tool = {'type': 'invalid_type', 'function': {'name': 'test'}}
list(_copy_tools([invalid_tool]))
def test_client_connection_error():
client = Client('http://localhost:1234')
with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
client.chat('model', messages=[{'role': 'user', 'content': 'prompt'}])
with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
client.chat('model', messages=[{'role': 'user', 'content': 'prompt'}])
with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
client.generate('model', 'prompt')
with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
client.show('model')
@pytest.mark.asyncio
async def test_async_client_connection_error():
client = AsyncClient('http://localhost:1234')
with pytest.raises(ConnectionError) as exc_info:
await client.chat('model', messages=[{'role': 'user', 'content': 'prompt'}])
assert str(exc_info.value) == 'Failed to connect to Ollama. Please check that Ollama is downloaded, running and accessible. https://ollama.com/download'
with pytest.raises(ConnectionError) as exc_info:
await client.generate('model', 'prompt')
assert str(exc_info.value) == 'Failed to connect to Ollama. Please check that Ollama is downloaded, running and accessible. https://ollama.com/download'
with pytest.raises(ConnectionError) as exc_info:
await client.show('model')
assert str(exc_info.value) == 'Failed to connect to Ollama. Please check that Ollama is downloaded, running and accessible. https://ollama.com/download'