mirror of
https://github.com/ollama/ollama-python.git
synced 2026-05-01 11:48:17 +08:00
client: improve error messaging on connection failure (#398)
Improve error messaging on connection failure
This commit is contained in:
parent
12d6842f32
commit
967fd657f1
@ -2,9 +2,9 @@ from ollama import Client
|
|||||||
|
|
||||||
client = Client()
|
client = Client()
|
||||||
response = client.create(
|
response = client.create(
|
||||||
model='my-assistant',
|
model='my-assistant',
|
||||||
from_='llama3.2',
|
from_='llama3.2',
|
||||||
system="You are mario from Super Mario Bros.",
|
system='You are mario from Super Mario Bros.',
|
||||||
stream=False
|
stream=False,
|
||||||
)
|
)
|
||||||
print(response.status)
|
print(response.status)
|
||||||
|
|||||||
@ -106,17 +106,22 @@ class BaseClient:
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
CONNECTION_ERROR_MESSAGE = 'Failed to connect to Ollama. Please check that Ollama is downloaded, running and accessible. https://ollama.com/download'
|
||||||
|
|
||||||
|
|
||||||
class Client(BaseClient):
|
class Client(BaseClient):
|
||||||
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
|
def __init__(self, host: Optional[str] = None, **kwargs) -> None:
|
||||||
super().__init__(httpx.Client, host, **kwargs)
|
super().__init__(httpx.Client, host, **kwargs)
|
||||||
|
|
||||||
def _request_raw(self, *args, **kwargs):
|
def _request_raw(self, *args, **kwargs):
|
||||||
r = self._client.request(*args, **kwargs)
|
|
||||||
try:
|
try:
|
||||||
|
r = self._client.request(*args, **kwargs)
|
||||||
r.raise_for_status()
|
r.raise_for_status()
|
||||||
|
return r
|
||||||
except httpx.HTTPStatusError as e:
|
except httpx.HTTPStatusError as e:
|
||||||
raise ResponseError(e.response.text, e.response.status_code) from None
|
raise ResponseError(e.response.text, e.response.status_code) from None
|
||||||
return r
|
except httpx.ConnectError:
|
||||||
|
raise ConnectionError(CONNECTION_ERROR_MESSAGE) from None
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def _request(
|
def _request(
|
||||||
@ -613,12 +618,14 @@ class AsyncClient(BaseClient):
|
|||||||
super().__init__(httpx.AsyncClient, host, **kwargs)
|
super().__init__(httpx.AsyncClient, host, **kwargs)
|
||||||
|
|
||||||
async def _request_raw(self, *args, **kwargs):
|
async def _request_raw(self, *args, **kwargs):
|
||||||
r = await self._client.request(*args, **kwargs)
|
|
||||||
try:
|
try:
|
||||||
|
r = await self._client.request(*args, **kwargs)
|
||||||
r.raise_for_status()
|
r.raise_for_status()
|
||||||
|
return r
|
||||||
except httpx.HTTPStatusError as e:
|
except httpx.HTTPStatusError as e:
|
||||||
raise ResponseError(e.response.text, e.response.status_code) from None
|
raise ResponseError(e.response.text, e.response.status_code) from None
|
||||||
return r
|
except httpx.ConnectError:
|
||||||
|
raise ConnectionError(CONNECTION_ERROR_MESSAGE) from None
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
async def _request(
|
async def _request(
|
||||||
|
|||||||
@ -535,3 +535,6 @@ class ResponseError(Exception):
|
|||||||
|
|
||||||
self.status_code = status_code
|
self.status_code = status_code
|
||||||
'HTTP status code of the response.'
|
'HTTP status code of the response.'
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return f'{self.error} (status code: {self.status_code})'
|
||||||
|
|||||||
@ -9,7 +9,7 @@ from pydantic import BaseModel, ValidationError
|
|||||||
from pytest_httpserver import HTTPServer, URIPattern
|
from pytest_httpserver import HTTPServer, URIPattern
|
||||||
from werkzeug.wrappers import Request, Response
|
from werkzeug.wrappers import Request, Response
|
||||||
|
|
||||||
from ollama._client import AsyncClient, Client, _copy_tools
|
from ollama._client import CONNECTION_ERROR_MESSAGE, AsyncClient, Client, _copy_tools
|
||||||
|
|
||||||
PNG_BASE64 = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'
|
PNG_BASE64 = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'
|
||||||
PNG_BYTES = base64.b64decode(PNG_BASE64)
|
PNG_BYTES = base64.b64decode(PNG_BASE64)
|
||||||
@ -1112,3 +1112,30 @@ def test_tool_validation():
|
|||||||
with pytest.raises(ValidationError):
|
with pytest.raises(ValidationError):
|
||||||
invalid_tool = {'type': 'invalid_type', 'function': {'name': 'test'}}
|
invalid_tool = {'type': 'invalid_type', 'function': {'name': 'test'}}
|
||||||
list(_copy_tools([invalid_tool]))
|
list(_copy_tools([invalid_tool]))
|
||||||
|
|
||||||
|
|
||||||
|
def test_client_connection_error():
|
||||||
|
client = Client('http://localhost:1234')
|
||||||
|
|
||||||
|
with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
|
||||||
|
client.chat('model', messages=[{'role': 'user', 'content': 'prompt'}])
|
||||||
|
with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
|
||||||
|
client.chat('model', messages=[{'role': 'user', 'content': 'prompt'}])
|
||||||
|
with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
|
||||||
|
client.generate('model', 'prompt')
|
||||||
|
with pytest.raises(ConnectionError, match=CONNECTION_ERROR_MESSAGE):
|
||||||
|
client.show('model')
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_async_client_connection_error():
|
||||||
|
client = AsyncClient('http://localhost:1234')
|
||||||
|
with pytest.raises(ConnectionError) as exc_info:
|
||||||
|
await client.chat('model', messages=[{'role': 'user', 'content': 'prompt'}])
|
||||||
|
assert str(exc_info.value) == 'Failed to connect to Ollama. Please check that Ollama is downloaded, running and accessible. https://ollama.com/download'
|
||||||
|
with pytest.raises(ConnectionError) as exc_info:
|
||||||
|
await client.generate('model', 'prompt')
|
||||||
|
assert str(exc_info.value) == 'Failed to connect to Ollama. Please check that Ollama is downloaded, running and accessible. https://ollama.com/download'
|
||||||
|
with pytest.raises(ConnectionError) as exc_info:
|
||||||
|
await client.show('model')
|
||||||
|
assert str(exc_info.value) == 'Failed to connect to Ollama. Please check that Ollama is downloaded, running and accessible. https://ollama.com/download'
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user