mirror of
https://github.com/ollama/ollama-python.git
synced 2026-05-03 04:42:42 +00:00
feat: add client.exists() to check if a model is available locally
Implements #640 — adds exists() method to Client and AsyncClient that wraps list() to check if a specific model is pulled locally. Usage: client.exists('llama3.1:8b') # True or False Also exposed as top-level ollama.exists() convenience function.
This commit is contained in:
@@ -52,6 +52,7 @@ push = _client.push
|
|||||||
# Module-level convenience API: each name delegates to the shared default
# Client instance (_client), so `ollama.exists(...)` works without the caller
# constructing a Client themselves.
create = _client.create
delete = _client.delete
list = _client.list  # NOTE: intentionally shadows the builtin `list` at module scope (existing project convention)
exists = _client.exists  # new: check whether a model is pulled locally
copy = _client.copy
show = _client.show
ps = _client.ps
|||||||
@@ -629,6 +629,18 @@ class Client(BaseClient):
|
|||||||
'/api/tags',
|
'/api/tags',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def exists(self, model: str) -> bool:
  """Check if a model is available locally.

  Args:
    model: The model name to check (e.g. 'llama3.1:8b'). A name without an
      explicit tag (e.g. 'llama3.1') also matches the ':latest' tag, mirroring
      how Ollama itself resolves untagged model names.

  Returns:
    True if the model exists locally, False otherwise.
  """
  # Ollama resolves an untagged name to ':latest' (pulling 'llama3.1' stores
  # 'llama3.1:latest'), so accept both spellings when no tag is given.
  candidates = {model} if ':' in model else {model, f'{model}:latest'}
  models = self.list().models or []
  return any(m.model in candidates for m in models)
|
||||||
|
|
||||||
def delete(self, model: str) -> StatusResponse:
|
def delete(self, model: str) -> StatusResponse:
|
||||||
r = self._request_raw(
|
r = self._request_raw(
|
||||||
'DELETE',
|
'DELETE',
|
||||||
@@ -1270,6 +1282,19 @@ class AsyncClient(BaseClient):
|
|||||||
'/api/tags',
|
'/api/tags',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
async def exists(self, model: str) -> bool:
  """Check if a model is available locally.

  Args:
    model: The model name to check (e.g. 'llama3.1:8b'). A name without an
      explicit tag (e.g. 'llama3.1') also matches the ':latest' tag, mirroring
      how Ollama itself resolves untagged model names.

  Returns:
    True if the model exists locally, False otherwise.
  """
  # Ollama resolves an untagged name to ':latest' (pulling 'llama3.1' stores
  # 'llama3.1:latest'), so accept both spellings when no tag is given.
  candidates = {model} if ':' in model else {model, f'{model}:latest'}
  resp = await self.list()
  return any(m.model in candidates for m in (resp.models or []))
|
||||||
|
|
||||||
async def delete(self, model: str) -> StatusResponse:
|
async def delete(self, model: str) -> StatusResponse:
|
||||||
r = await self._request_raw(
|
r = await self._request_raw(
|
||||||
'DELETE',
|
'DELETE',
|
||||||
|
|||||||
@@ -0,0 +1,82 @@
|
|||||||
|
import json
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from pytest_httpserver import HTTPServer
|
||||||
|
|
||||||
|
from ollama._client import AsyncClient, Client
|
||||||
|
|
||||||
|
|
||||||
|
pytestmark = pytest.mark.anyio
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def anyio_backend():
  """Select the asyncio backend for all anyio-marked tests in this module."""
  backend = 'asyncio'
  return backend
|
||||||
|
|
||||||
|
|
||||||
|
def test_client_exists_true(httpserver: HTTPServer):
  """A model listed by /api/tags is reported as existing."""
  tags_payload = {
    'models': [
      {'name': 'llama3.1:8b', 'model': 'llama3.1:8b', 'size': 4661224676},
      {'name': 'qwen2.5:latest', 'model': 'qwen2.5:latest', 'size': 4430121000},
    ]
  }
  httpserver.expect_ordered_request('/api/tags', method='GET').respond_with_json(tags_payload)

  client = Client(host=f'http://{httpserver.host}:{httpserver.port}')
  assert client.exists('llama3.1:8b') is True
|
||||||
|
|
||||||
|
|
||||||
|
def test_client_exists_false(httpserver: HTTPServer):
  """A model absent from /api/tags is reported as not existing."""
  tags_payload = {
    'models': [
      {'name': 'llama3.1:8b', 'model': 'llama3.1:8b', 'size': 4661224676},
    ]
  }
  httpserver.expect_ordered_request('/api/tags', method='GET').respond_with_json(tags_payload)

  client = Client(host=f'http://{httpserver.host}:{httpserver.port}')
  assert client.exists('gemma2:2b') is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_client_exists_empty_list(httpserver: HTTPServer):
  """With no models pulled at all, exists() is False for any name."""
  httpserver.expect_ordered_request('/api/tags', method='GET').respond_with_json({'models': []})

  client = Client(host=f'http://{httpserver.host}:{httpserver.port}')
  assert client.exists('llama3.1:8b') is False
|
||||||
|
|
||||||
|
|
||||||
|
async def test_async_client_exists_true(httpserver: HTTPServer):
  """Async client reports True for a model listed by /api/tags."""
  tags_payload = {
    'models': [
      {'name': 'llama3.1:8b', 'model': 'llama3.1:8b', 'size': 4661224676},
    ]
  }
  httpserver.expect_ordered_request('/api/tags', method='GET').respond_with_json(tags_payload)

  client = AsyncClient(host=f'http://{httpserver.host}:{httpserver.port}')
  assert await client.exists('llama3.1:8b') is True
|
||||||
|
|
||||||
|
|
||||||
|
async def test_async_client_exists_false(httpserver: HTTPServer):
  """Async client reports False when the model list is empty."""
  httpserver.expect_ordered_request('/api/tags', method='GET').respond_with_json({'models': []})

  client = AsyncClient(host=f'http://{httpserver.host}:{httpserver.port}')
  assert await client.exists('nonexistent') is False
|
||||||
Reference in New Issue
Block a user