pydantic types

Michael Yang 2024-09-04 10:40:35 -07:00
parent a7571423d3
commit 0bbc246007
7 changed files with 1218 additions and 647 deletions

ollama/__init__.py

@@ -1,10 +1,17 @@
from ollama._client import Client, AsyncClient
from ollama._types import (
  Options,
  Message,
  Tool,
  GenerateResponse,
  ChatResponse,
  EmbedResponse,
  EmbeddingsResponse,
  StatusResponse,
  ProgressResponse,
-  Message,
-  Options,
  ListResponse,
  ShowResponse,
  ProcessResponse,
  RequestError,
  ResponseError,
)
@@ -12,25 +19,20 @@ from ollama._types import (
__all__ = [
  'Client',
  'AsyncClient',
  'Options',
  'Message',
  'Tool',
  'GenerateResponse',
  'ChatResponse',
  'EmbedResponse',
  'EmbeddingsResponse',
  'StatusResponse',
  'ProgressResponse',
-  'Message',
-  'Options',
  'ListResponse',
  'ShowResponse',
  'ProcessResponse',
  'RequestError',
  'ResponseError',
-  'generate',
-  'chat',
-  'embed',
-  'embeddings',
-  'pull',
-  'push',
-  'create',
-  'delete',
-  'list',
-  'copy',
-  'show',
-  'ps',
]

_client = Client()
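
With the widened export list, the new request and response models are importable straight from the package. A small usage sketch (not part of the commit; values are illustrative):

from ollama import Message, Options

opts = Options(temperature=0.1, num_ctx=4096)  # typed alternative to a plain options dict
msg = Message(role='user', content='Why is the sky blue?')
assert msg['role'] == 'user'  # models stay subscriptable, see ollama/_types.py below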

File diff suppressed because it is too large.

ollama/_types.py

@@ -1,43 +1,162 @@
import json
-from typing import Any, TypedDict, Sequence, Literal, Mapping
-
-import sys
-
-if sys.version_info < (3, 11):
-  from typing_extensions import NotRequired
-else:
-  from typing import NotRequired
from base64 import b64encode
from pathlib import Path
from datetime import datetime
from typing import (
  Any,
  Literal,
  Mapping,
  Optional,
  Sequence,
  Union,
)

from typing_extensions import Annotated
from pydantic import (
  BaseModel,
  ByteSize,
  Field,
  FilePath,
  Base64Str,
  model_serializer,
)
from pydantic.json_schema import JsonSchemaValue


class SubscriptableBaseModel(BaseModel):
  def __getitem__(self, key: str) -> Any:
    return getattr(self, key)

  def __setitem__(self, key: str, value: Any) -> None:
    setattr(self, key, value)

  def __contains__(self, key: str) -> bool:
    return hasattr(self, key)

  def get(self, key: str, default: Any = None) -> Any:
    return getattr(self, key, default)


class Options(SubscriptableBaseModel):
  # load time options
  numa: Optional[bool] = None
  num_ctx: Optional[int] = None
  num_batch: Optional[int] = None
  num_gpu: Optional[int] = None
  main_gpu: Optional[int] = None
  low_vram: Optional[bool] = None
  f16_kv: Optional[bool] = None
  logits_all: Optional[bool] = None
  vocab_only: Optional[bool] = None
  use_mmap: Optional[bool] = None
  use_mlock: Optional[bool] = None
  embedding_only: Optional[bool] = None
  num_thread: Optional[int] = None

  # runtime options
  num_keep: Optional[int] = None
  seed: Optional[int] = None
  num_predict: Optional[int] = None
  top_k: Optional[int] = None
  top_p: Optional[float] = None
  tfs_z: Optional[float] = None
  typical_p: Optional[float] = None
  repeat_last_n: Optional[int] = None
  temperature: Optional[float] = None
  repeat_penalty: Optional[float] = None
  presence_penalty: Optional[float] = None
  frequency_penalty: Optional[float] = None
  mirostat: Optional[int] = None
  mirostat_tau: Optional[float] = None
  mirostat_eta: Optional[float] = None
  penalize_newline: Optional[bool] = None
  stop: Optional[Sequence[str]] = None


class BaseRequest(SubscriptableBaseModel):
  model: Annotated[str, Field(min_length=1)]
  'Model to use for the request.'


class BaseStreamableRequest(BaseRequest):
  stream: Optional[bool] = None
  'Stream response.'


class BaseGenerateRequest(BaseStreamableRequest):
  options: Optional[Union[Mapping[str, Any], Options]] = None
  'Options to use for the request.'
  format: Optional[Literal['', 'json']] = None
  'Format of the response.'
  keep_alive: Optional[Union[float, str]] = None
  'Keep model alive for the specified duration.'


class Image(BaseModel):
  value: Union[FilePath, Base64Str, bytes]

  @model_serializer
  def serialize_model(self):
    if isinstance(self.value, Path):
      return b64encode(self.value.read_bytes()).decode()
    elif isinstance(self.value, bytes):
      return b64encode(self.value).decode()
    return self.value


class GenerateRequest(BaseGenerateRequest):
  prompt: Optional[str] = None
  'Prompt to generate response from.'
  suffix: Optional[str] = None
  'Suffix to append to the response.'
  system: Optional[str] = None
  'System prompt to prepend to the prompt.'
  template: Optional[str] = None
  'Template to use for the response.'
  context: Optional[Sequence[int]] = None
  'Tokenized history to use for the response.'
  raw: Optional[bool] = None
  images: Optional[Sequence[Image]] = None
  'Image data for multimodal models.'


-class BaseGenerateResponse(TypedDict):
class BaseGenerateResponse(SubscriptableBaseModel):
-  model: str
  model: Optional[str] = None
  'Model used to generate response.'
-  created_at: str
  created_at: Optional[str] = None
  'Time when the request was created.'
-  done: bool
  done: Optional[bool] = None
  'True if response is complete, otherwise False. Useful for streaming to detect the final response.'
-  done_reason: str
  done_reason: Optional[str] = None
  'Reason for completion. Only present when done is True.'
-  total_duration: int
  total_duration: Optional[int] = None
  'Total duration in nanoseconds.'
-  load_duration: int
  load_duration: Optional[int] = None
  'Load duration in nanoseconds.'
-  prompt_eval_count: int
  prompt_eval_count: Optional[int] = None
  'Number of tokens evaluated in the prompt.'
-  prompt_eval_duration: int
  prompt_eval_duration: Optional[int] = None
  'Duration of evaluating the prompt in nanoseconds.'
-  eval_count: int
  eval_count: Optional[int] = None
  'Number of tokens evaluated in inference.'
-  eval_duration: int
  eval_duration: Optional[int] = None
  'Duration of evaluating inference in nanoseconds.'
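
Everything in this hunk derives from SubscriptableBaseModel, so the new pydantic models keep the dict-style access the old TypedDicts had, and requests are serialized with pydantic. A rough sketch of what that enables (illustrative only; the model name, option values, and image bytes are made up):

from ollama._types import GenerateRequest, Image, Options

req = GenerateRequest(
  model='llama3',  # validated against min_length=1
  prompt='Why is the sky blue?',
  options=Options(temperature=0.1),  # a plain mapping is also accepted
  images=[Image(value=b'\x89PNG...')],  # raw bytes are base64-encoded by serialize_model
)

assert req['model'] == req.model  # attribute and dict-style access are interchangeable
payload = req.model_dump(exclude_none=True)  # unset optional fields are dropped from the body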
@@ -49,43 +168,22 @@ class GenerateResponse(BaseGenerateResponse):
  response: str
  'Response content. When streaming, this contains a fragment of the response.'
-  context: Sequence[int]
  context: Optional[Sequence[int]] = None
  'Tokenized history up to the point of the response.'


-class ToolCallFunction(TypedDict):
-  """
-  Tool call function.
-  """
-  name: str
-  'Name of the function.'
-  arguments: NotRequired[Mapping[str, Any]]
-  'Arguments of the function.'


-class ToolCall(TypedDict):
-  """
-  Model tool calls.
-  """
-  function: ToolCallFunction
-  'Function to be called.'


-class Message(TypedDict):
class Message(SubscriptableBaseModel):
  """
  Chat message.
  """

  role: Literal['user', 'assistant', 'system', 'tool']
-  "Assumed role of the message. Response messages always has role 'assistant' or 'tool'."
  "Assumed role of the message. Response messages has role 'assistant' or 'tool'."
-  content: NotRequired[str]
  content: Optional[str] = None
  'Content of the message. Response messages contains message fragments when streaming.'
-  images: NotRequired[Sequence[Any]]
  images: Optional[Sequence[Image]] = None
  """
  Optional list of image data for multimodal models.
@@ -97,33 +195,54 @@ class Message(TypedDict):
  Valid image formats depend on the model. See the model card for more information.
  """

-  tool_calls: NotRequired[Sequence[ToolCall]]
  class ToolCall(SubscriptableBaseModel):
    """
    Model tool calls.
    """

    class Function(SubscriptableBaseModel):
      """
      Tool call function.
      """

      name: str
      'Name of the function.'
      arguments: Mapping[str, Any]
      'Arguments of the function.'

    function: Function
    'Function to be called.'

  tool_calls: Optional[Sequence[ToolCall]] = None
  """
  Tools calls to be made by the model.
  """
-class Property(TypedDict):
-  type: str
-  description: str
-  enum: NotRequired[Sequence[str]]  # `enum` is optional and can be a list of strings


-class Parameters(TypedDict):
-  type: str
-  required: Sequence[str]
-  properties: Mapping[str, Property]


-class ToolFunction(TypedDict):
-  name: str
-  description: str
-  parameters: Parameters


-class Tool(TypedDict):
-  type: str
-  function: ToolFunction
class Tool(SubscriptableBaseModel):
  type: Literal['function'] = 'function'

  class Function(SubscriptableBaseModel):
    name: str
    description: str

    class Parameters(SubscriptableBaseModel):
      type: str
      required: Optional[Sequence[str]] = None
      properties: Optional[JsonSchemaValue] = None

    parameters: Parameters

  function: Function


class ChatRequest(BaseGenerateRequest):
  messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None
  'Messages to chat with.'
  tools: Optional[Sequence[Tool]] = None
  'Tools to use for the chat.'


class ChatResponse(BaseGenerateResponse):
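
The Tool model (with Function and Parameters assumed to be nested classes, as the layout above suggests) mirrors the JSON schema the chat endpoint expects for function calling. A hedged sketch with an invented tool name and schema:

from ollama._types import ChatRequest, Tool

weather_tool = Tool(
  function=Tool.Function(
    name='get_weather',
    description='Look up the current weather for a city.',
    parameters=Tool.Function.Parameters(
      type='object',
      required=['city'],
      properties={'city': {'type': 'string'}},  # any JSON-schema value is accepted
    ),
  )
)
assert weather_tool.type == 'function'  # `type` defaults to 'function'

chat_req = ChatRequest(
  model='llama3',
  messages=[{'role': 'user', 'content': 'Weather in Paris?'}],  # dicts are coerced to Message
  tools=[weather_tool],
)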
@@ -135,47 +254,156 @@ class ChatResponse(BaseGenerateResponse):
  'Response message.'


-class ProgressResponse(TypedDict):
-  status: str
-  completed: int
-  total: int
-  digest: str


-class Options(TypedDict, total=False):
-  # load time options
-  numa: bool
-  num_ctx: int
-  num_batch: int
-  num_gpu: int
-  main_gpu: int
-  low_vram: bool
-  f16_kv: bool
-  logits_all: bool
-  vocab_only: bool
-  use_mmap: bool
-  use_mlock: bool
-  embedding_only: bool
-  num_thread: int
-  # runtime options
-  num_keep: int
-  seed: int
-  num_predict: int
-  top_k: int
-  top_p: float
-  tfs_z: float
-  typical_p: float
-  repeat_last_n: int
-  temperature: float
-  repeat_penalty: float
-  presence_penalty: float
-  frequency_penalty: float
-  mirostat: int
-  mirostat_tau: float
-  mirostat_eta: float
-  penalize_newline: bool
-  stop: Sequence[str]


class EmbedRequest(BaseRequest):
  input: Union[str, Sequence[str]]
  'Input text to embed.'

  truncate: Optional[bool] = None
  'Truncate the input to the maximum token length.'

  options: Optional[Union[Mapping[str, Any], Options]] = None
  'Options to use for the request.'

  keep_alive: Optional[Union[float, str]] = None


class EmbedResponse(BaseGenerateResponse):
  """
  Response returned by embed requests.
  """

  embeddings: Sequence[Sequence[float]]
  'Embeddings of the inputs.'


class EmbeddingsRequest(BaseRequest):
  prompt: Optional[str] = None
  'Prompt to generate embeddings from.'

  options: Optional[Union[Mapping[str, Any], Options]] = None
  'Options to use for the request.'

  keep_alive: Optional[Union[float, str]] = None


class EmbeddingsResponse(SubscriptableBaseModel):
  """
  Response returned by embeddings requests.
  """

  embedding: Sequence[float]
  'Embedding of the prompt.'


class PullRequest(BaseStreamableRequest):
  """
  Request to pull the model.
  """

  insecure: Optional[bool] = None
  'Allow insecure (HTTP) connections.'


class PushRequest(BaseStreamableRequest):
  """
  Request to pull the model.
  """

  insecure: Optional[bool] = None
  'Allow insecure (HTTP) connections.'


class CreateRequest(BaseStreamableRequest):
  """
  Request to create a new model.
  """

  modelfile: Optional[str] = None
  quantize: Optional[str] = None


class ModelDetails(SubscriptableBaseModel):
  parent_model: Optional[str] = None
  format: Optional[str] = None
  family: Optional[str] = None
  families: Optional[Sequence[str]] = None
  parameter_size: Optional[str] = None
  quantization_level: Optional[str] = None


class ListResponse(SubscriptableBaseModel):
  class Model(BaseModel):
    modified_at: Optional[datetime] = None
    digest: Optional[str] = None
    size: Optional[ByteSize] = None
    details: Optional[ModelDetails] = None

  models: Sequence[Model]
  'List of models.'


class DeleteRequest(BaseRequest):
  """
  Request to delete a model.
  """


class CopyRequest(BaseModel):
  """
  Request to copy a model.
  """

  source: str
  'Source model to copy.'

  destination: str
  'Destination model to copy to.'


class StatusResponse(SubscriptableBaseModel):
  status: Optional[str] = None


class ProgressResponse(StatusResponse):
  completed: Optional[int] = None
  total: Optional[int] = None
  digest: Optional[str] = None


class ShowRequest(BaseRequest):
  """
  Request to show model information.
  """


class ShowResponse(SubscriptableBaseModel):
  modified_at: Optional[datetime] = None
  template: Optional[str] = None
  modelfile: Optional[str] = None
  license: Optional[str] = None
  details: Optional[ModelDetails] = None
  modelinfo: Optional[Mapping[str, Any]] = Field(alias='model_info')
  parameters: Optional[str] = None


class ProcessResponse(SubscriptableBaseModel):
  class Model(BaseModel):
    model: Optional[str] = None
    name: Optional[str] = None
    digest: Optional[str] = None
    expires_at: Optional[datetime] = None
    size: Optional[ByteSize] = None
    size_vram: Optional[ByteSize] = None
    details: Optional[ModelDetails] = None

  models: Sequence[Model]


class RequestError(Exception):
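
Responses are now pydantic models as well, but the SubscriptableBaseModel base keeps them usable like the old response dicts. A short sketch (field values invented):

from ollama._types import ProgressResponse

p = ProgressResponse(status='pulling manifest', completed=512, total=1024)
assert p['status'] == p.status  # dict-style access still works
assert p.get('digest') is None  # unset optional fields are None
print(p.model_dump(exclude_none=True))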

poetry.lock (generated)

@@ -1,5 +1,19 @@
# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
[[package]]
name = "annotated-types"
version = "0.7.0"
description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.8"
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
]
[package.dependencies]
typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
[[package]]
name = "anyio"
version = "4.5.2"
@@ -419,6 +433,130 @@ files = [
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pydantic"
version = "2.9.0"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
files = [
{file = "pydantic-2.9.0-py3-none-any.whl", hash = "sha256:f66a7073abd93214a20c5f7b32d56843137a7a2e70d02111f3be287035c45370"},
{file = "pydantic-2.9.0.tar.gz", hash = "sha256:c7a8a9fdf7d100afa49647eae340e2d23efa382466a8d177efcd1381e9be5598"},
]
[package.dependencies]
annotated-types = ">=0.4.0"
pydantic-core = "2.23.2"
typing-extensions = [
{version = ">=4.12.2", markers = "python_version >= \"3.13\""},
{version = ">=4.6.1", markers = "python_version < \"3.13\""},
]
tzdata = {version = "*", markers = "python_version >= \"3.9\""}
[package.extras]
email = ["email-validator (>=2.0.0)"]
[[package]]
name = "pydantic-core"
version = "2.23.2"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
files = [
{file = "pydantic_core-2.23.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7d0324a35ab436c9d768753cbc3c47a865a2cbc0757066cb864747baa61f6ece"},
{file = "pydantic_core-2.23.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:276ae78153a94b664e700ac362587c73b84399bd1145e135287513442e7dfbc7"},
{file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c7aa318da542cdcc60d4a648377ffe1a2ef0eb1e996026c7f74507b720a78"},
{file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cf842265a3a820ebc6388b963ead065f5ce8f2068ac4e1c713ef77a67b71f7c"},
{file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae90b9e50fe1bd115b24785e962b51130340408156d34d67b5f8f3fa6540938e"},
{file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ae65fdfb8a841556b52935dfd4c3f79132dc5253b12c0061b96415208f4d622"},
{file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c8aa40f6ca803f95b1c1c5aeaee6237b9e879e4dfb46ad713229a63651a95fb"},
{file = "pydantic_core-2.23.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c53100c8ee5a1e102766abde2158077d8c374bee0639201f11d3032e3555dfbc"},
{file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6b9dd6aa03c812017411734e496c44fef29b43dba1e3dd1fa7361bbacfc1354"},
{file = "pydantic_core-2.23.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b18cf68255a476b927910c6873d9ed00da692bb293c5b10b282bd48a0afe3ae2"},
{file = "pydantic_core-2.23.2-cp310-none-win32.whl", hash = "sha256:e460475719721d59cd54a350c1f71c797c763212c836bf48585478c5514d2854"},
{file = "pydantic_core-2.23.2-cp310-none-win_amd64.whl", hash = "sha256:5f3cf3721eaf8741cffaf092487f1ca80831202ce91672776b02b875580e174a"},
{file = "pydantic_core-2.23.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:7ce8e26b86a91e305858e018afc7a6e932f17428b1eaa60154bd1f7ee888b5f8"},
{file = "pydantic_core-2.23.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e9b24cca4037a561422bf5dc52b38d390fb61f7bfff64053ce1b72f6938e6b2"},
{file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:753294d42fb072aa1775bfe1a2ba1012427376718fa4c72de52005a3d2a22178"},
{file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:257d6a410a0d8aeb50b4283dea39bb79b14303e0fab0f2b9d617701331ed1515"},
{file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8319e0bd6a7b45ad76166cc3d5d6a36c97d0c82a196f478c3ee5346566eebfd"},
{file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a05c0240f6c711eb381ac392de987ee974fa9336071fb697768dfdb151345ce"},
{file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d5b0ff3218858859910295df6953d7bafac3a48d5cd18f4e3ed9999efd2245f"},
{file = "pydantic_core-2.23.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:96ef39add33ff58cd4c112cbac076726b96b98bb8f1e7f7595288dcfb2f10b57"},
{file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0102e49ac7d2df3379ef8d658d3bc59d3d769b0bdb17da189b75efa861fc07b4"},
{file = "pydantic_core-2.23.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6612c2a844043e4d10a8324c54cdff0042c558eef30bd705770793d70b224aa"},
{file = "pydantic_core-2.23.2-cp311-none-win32.whl", hash = "sha256:caffda619099cfd4f63d48462f6aadbecee3ad9603b4b88b60cb821c1b258576"},
{file = "pydantic_core-2.23.2-cp311-none-win_amd64.whl", hash = "sha256:6f80fba4af0cb1d2344869d56430e304a51396b70d46b91a55ed4959993c0589"},
{file = "pydantic_core-2.23.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4c83c64d05ffbbe12d4e8498ab72bdb05bcc1026340a4a597dc647a13c1605ec"},
{file = "pydantic_core-2.23.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6294907eaaccf71c076abdd1c7954e272efa39bb043161b4b8aa1cd76a16ce43"},
{file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a801c5e1e13272e0909c520708122496647d1279d252c9e6e07dac216accc41"},
{file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cc0c316fba3ce72ac3ab7902a888b9dc4979162d320823679da270c2d9ad0cad"},
{file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b06c5d4e8701ac2ba99a2ef835e4e1b187d41095a9c619c5b185c9068ed2a49"},
{file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82764c0bd697159fe9947ad59b6db6d7329e88505c8f98990eb07e84cc0a5d81"},
{file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b1a195efd347ede8bcf723e932300292eb13a9d2a3c1f84eb8f37cbbc905b7f"},
{file = "pydantic_core-2.23.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7efb12e5071ad8d5b547487bdad489fbd4a5a35a0fc36a1941517a6ad7f23e0"},
{file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5dd0ec5f514ed40e49bf961d49cf1bc2c72e9b50f29a163b2cc9030c6742aa73"},
{file = "pydantic_core-2.23.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:820f6ee5c06bc868335e3b6e42d7ef41f50dfb3ea32fbd523ab679d10d8741c0"},
{file = "pydantic_core-2.23.2-cp312-none-win32.whl", hash = "sha256:3713dc093d5048bfaedbba7a8dbc53e74c44a140d45ede020dc347dda18daf3f"},
{file = "pydantic_core-2.23.2-cp312-none-win_amd64.whl", hash = "sha256:e1895e949f8849bc2757c0dbac28422a04be031204df46a56ab34bcf98507342"},
{file = "pydantic_core-2.23.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:da43cbe593e3c87d07108d0ebd73771dc414488f1f91ed2e204b0370b94b37ac"},
{file = "pydantic_core-2.23.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:64d094ea1aa97c6ded4748d40886076a931a8bf6f61b6e43e4a1041769c39dd2"},
{file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084414ffe9a85a52940b49631321d636dadf3576c30259607b75516d131fecd0"},
{file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:043ef8469f72609c4c3a5e06a07a1f713d53df4d53112c6d49207c0bd3c3bd9b"},
{file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3649bd3ae6a8ebea7dc381afb7f3c6db237fc7cebd05c8ac36ca8a4187b03b30"},
{file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6db09153d8438425e98cdc9a289c5fade04a5d2128faff8f227c459da21b9703"},
{file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5668b3173bb0b2e65020b60d83f5910a7224027232c9f5dc05a71a1deac9f960"},
{file = "pydantic_core-2.23.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c7b81beaf7c7ebde978377dc53679c6cba0e946426fc7ade54251dfe24a7604"},
{file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:ae579143826c6f05a361d9546446c432a165ecf1c0b720bbfd81152645cb897d"},
{file = "pydantic_core-2.23.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:19f1352fe4b248cae22a89268720fc74e83f008057a652894f08fa931e77dced"},
{file = "pydantic_core-2.23.2-cp313-none-win32.whl", hash = "sha256:e1a79ad49f346aa1a2921f31e8dbbab4d64484823e813a002679eaa46cba39e1"},
{file = "pydantic_core-2.23.2-cp313-none-win_amd64.whl", hash = "sha256:582871902e1902b3c8e9b2c347f32a792a07094110c1bca6c2ea89b90150caac"},
{file = "pydantic_core-2.23.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:743e5811b0c377eb830150d675b0847a74a44d4ad5ab8845923d5b3a756d8100"},
{file = "pydantic_core-2.23.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6650a7bbe17a2717167e3e23c186849bae5cef35d38949549f1c116031b2b3aa"},
{file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56e6a12ec8d7679f41b3750ffa426d22b44ef97be226a9bab00a03365f217b2b"},
{file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810ca06cca91de9107718dc83d9ac4d2e86efd6c02cba49a190abcaf33fb0472"},
{file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:785e7f517ebb9890813d31cb5d328fa5eda825bb205065cde760b3150e4de1f7"},
{file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ef71ec876fcc4d3bbf2ae81961959e8d62f8d74a83d116668409c224012e3af"},
{file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50ac34835c6a4a0d456b5db559b82047403c4317b3bc73b3455fefdbdc54b0a"},
{file = "pydantic_core-2.23.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16b25a4a120a2bb7dab51b81e3d9f3cde4f9a4456566c403ed29ac81bf49744f"},
{file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:41ae8537ad371ec018e3c5da0eb3f3e40ee1011eb9be1da7f965357c4623c501"},
{file = "pydantic_core-2.23.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07049ec9306ec64e955b2e7c40c8d77dd78ea89adb97a2013d0b6e055c5ee4c5"},
{file = "pydantic_core-2.23.2-cp38-none-win32.whl", hash = "sha256:086c5db95157dc84c63ff9d96ebb8856f47ce113c86b61065a066f8efbe80acf"},
{file = "pydantic_core-2.23.2-cp38-none-win_amd64.whl", hash = "sha256:67b6655311b00581914aba481729971b88bb8bc7996206590700a3ac85e457b8"},
{file = "pydantic_core-2.23.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:358331e21a897151e54d58e08d0219acf98ebb14c567267a87e971f3d2a3be59"},
{file = "pydantic_core-2.23.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d9f15ffe68bcd3898b0ad7233af01b15c57d91cd1667f8d868e0eacbfe3f87"},
{file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0123655fedacf035ab10c23450163c2f65a4174f2bb034b188240a6cf06bb123"},
{file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6e3ccebdbd6e53474b0bb7ab8b88e83c0cfe91484b25e058e581348ee5a01a5"},
{file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc535cb898ef88333cf317777ecdfe0faac1c2a3187ef7eb061b6f7ecf7e6bae"},
{file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aab9e522efff3993a9e98ab14263d4e20211e62da088298089a03056980a3e69"},
{file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05b366fb8fe3d8683b11ac35fa08947d7b92be78ec64e3277d03bd7f9b7cda79"},
{file = "pydantic_core-2.23.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7568f682c06f10f30ef643a1e8eec4afeecdafde5c4af1b574c6df079e96f96c"},
{file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cdd02a08205dc90238669f082747612cb3c82bd2c717adc60f9b9ecadb540f80"},
{file = "pydantic_core-2.23.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a2ab4f410f4b886de53b6bddf5dd6f337915a29dd9f22f20f3099659536b2f6"},
{file = "pydantic_core-2.23.2-cp39-none-win32.whl", hash = "sha256:0448b81c3dfcde439551bb04a9f41d7627f676b12701865c8a2574bcea034437"},
{file = "pydantic_core-2.23.2-cp39-none-win_amd64.whl", hash = "sha256:4cebb9794f67266d65e7e4cbe5dcf063e29fc7b81c79dc9475bd476d9534150e"},
{file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e758d271ed0286d146cf7c04c539a5169a888dd0b57026be621547e756af55bc"},
{file = "pydantic_core-2.23.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f477d26183e94eaafc60b983ab25af2a809a1b48ce4debb57b343f671b7a90b6"},
{file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da3131ef2b940b99106f29dfbc30d9505643f766704e14c5d5e504e6a480c35e"},
{file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329a721253c7e4cbd7aad4a377745fbcc0607f9d72a3cc2102dd40519be75ed2"},
{file = "pydantic_core-2.23.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7706e15cdbf42f8fab1e6425247dfa98f4a6f8c63746c995d6a2017f78e619ae"},
{file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e64ffaf8f6e17ca15eb48344d86a7a741454526f3a3fa56bc493ad9d7ec63936"},
{file = "pydantic_core-2.23.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dd59638025160056687d598b054b64a79183f8065eae0d3f5ca523cde9943940"},
{file = "pydantic_core-2.23.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:12625e69b1199e94b0ae1c9a95d000484ce9f0182f9965a26572f054b1537e44"},
{file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d813fd871b3d5c3005157622ee102e8908ad6011ec915a18bd8fde673c4360e"},
{file = "pydantic_core-2.23.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1eb37f7d6a8001c0f86dc8ff2ee8d08291a536d76e49e78cda8587bb54d8b329"},
{file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce7eaf9a98680b4312b7cebcdd9352531c43db00fca586115845df388f3c465"},
{file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f087879f1ffde024dd2788a30d55acd67959dcf6c431e9d3682d1c491a0eb474"},
{file = "pydantic_core-2.23.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ce883906810b4c3bd90e0ada1f9e808d9ecf1c5f0b60c6b8831d6100bcc7dd6"},
{file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a8031074a397a5925d06b590121f8339d34a5a74cfe6970f8a1124eb8b83f4ac"},
{file = "pydantic_core-2.23.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23af245b8f2f4ee9e2c99cb3f93d0e22fb5c16df3f2f643f5a8da5caff12a653"},
{file = "pydantic_core-2.23.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c57e493a0faea1e4c38f860d6862ba6832723396c884fbf938ff5e9b224200e2"},
{file = "pydantic_core-2.23.2.tar.gz", hash = "sha256:95d6bf449a1ac81de562d65d180af5d8c19672793c81877a2eda8fde5d08f2fd"},
]
[package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pytest"
version = "8.3.3"
@@ -551,6 +689,28 @@ files = [
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
[[package]]
name = "typing-extensions"
version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
[[package]]
name = "tzdata"
version = "2024.1"
description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
files = [
{file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
{file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
]
[[package]]
name = "werkzeug"
version = "3.0.6"
@@ -571,4 +731,4 @@ watchdog = ["watchdog (>=2.3)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.8"
-content-hash = "e36516c932ab9dd7497acc0c3d55ab2c963004595efe97c2bc80854687c32c1e"
content-hash = "e664c86cc330480eb86239842f55f12b0fba4df5c2fc776d094f37f58320e637"

pyproject.toml

@@ -11,6 +11,7 @@ repository = "https://github.com/ollama/ollama-python"
[tool.poetry.dependencies]
python = "^3.8"
httpx = "^0.27.0"
pydantic = "^2.9.0"

[tool.poetry.group.dev.dependencies]
pytest = ">=7.4.3,<9.0.0"

requirements.txt

@@ -1,3 +1,6 @@
annotated-types==0.7.0 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \
--hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89
anyio==4.5.2 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b \
--hash=sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f
@@ -19,9 +22,105 @@ httpx==0.27.2 ; python_version >= "3.8" and python_version < "4.0" \
idna==3.10 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
--hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
pydantic-core==2.23.2 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:0102e49ac7d2df3379ef8d658d3bc59d3d769b0bdb17da189b75efa861fc07b4 \
--hash=sha256:0123655fedacf035ab10c23450163c2f65a4174f2bb034b188240a6cf06bb123 \
--hash=sha256:043ef8469f72609c4c3a5e06a07a1f713d53df4d53112c6d49207c0bd3c3bd9b \
--hash=sha256:0448b81c3dfcde439551bb04a9f41d7627f676b12701865c8a2574bcea034437 \
--hash=sha256:05b366fb8fe3d8683b11ac35fa08947d7b92be78ec64e3277d03bd7f9b7cda79 \
--hash=sha256:07049ec9306ec64e955b2e7c40c8d77dd78ea89adb97a2013d0b6e055c5ee4c5 \
--hash=sha256:084414ffe9a85a52940b49631321d636dadf3576c30259607b75516d131fecd0 \
--hash=sha256:086c5db95157dc84c63ff9d96ebb8856f47ce113c86b61065a066f8efbe80acf \
--hash=sha256:12625e69b1199e94b0ae1c9a95d000484ce9f0182f9965a26572f054b1537e44 \
--hash=sha256:16b25a4a120a2bb7dab51b81e3d9f3cde4f9a4456566c403ed29ac81bf49744f \
--hash=sha256:19f1352fe4b248cae22a89268720fc74e83f008057a652894f08fa931e77dced \
--hash=sha256:1a2ab4f410f4b886de53b6bddf5dd6f337915a29dd9f22f20f3099659536b2f6 \
--hash=sha256:1c7b81beaf7c7ebde978377dc53679c6cba0e946426fc7ade54251dfe24a7604 \
--hash=sha256:1cf842265a3a820ebc6388b963ead065f5ce8f2068ac4e1c713ef77a67b71f7c \
--hash=sha256:1eb37f7d6a8001c0f86dc8ff2ee8d08291a536d76e49e78cda8587bb54d8b329 \
--hash=sha256:23af245b8f2f4ee9e2c99cb3f93d0e22fb5c16df3f2f643f5a8da5caff12a653 \
--hash=sha256:257d6a410a0d8aeb50b4283dea39bb79b14303e0fab0f2b9d617701331ed1515 \
--hash=sha256:276ae78153a94b664e700ac362587c73b84399bd1145e135287513442e7dfbc7 \
--hash=sha256:2b1a195efd347ede8bcf723e932300292eb13a9d2a3c1f84eb8f37cbbc905b7f \
--hash=sha256:329a721253c7e4cbd7aad4a377745fbcc0607f9d72a3cc2102dd40519be75ed2 \
--hash=sha256:358331e21a897151e54d58e08d0219acf98ebb14c567267a87e971f3d2a3be59 \
--hash=sha256:3649bd3ae6a8ebea7dc381afb7f3c6db237fc7cebd05c8ac36ca8a4187b03b30 \
--hash=sha256:3713dc093d5048bfaedbba7a8dbc53e74c44a140d45ede020dc347dda18daf3f \
--hash=sha256:3ef71ec876fcc4d3bbf2ae81961959e8d62f8d74a83d116668409c224012e3af \
--hash=sha256:41ae8537ad371ec018e3c5da0eb3f3e40ee1011eb9be1da7f965357c4623c501 \
--hash=sha256:4a801c5e1e13272e0909c520708122496647d1279d252c9e6e07dac216accc41 \
--hash=sha256:4c83c64d05ffbbe12d4e8498ab72bdb05bcc1026340a4a597dc647a13c1605ec \
--hash=sha256:4cebb9794f67266d65e7e4cbe5dcf063e29fc7b81c79dc9475bd476d9534150e \
--hash=sha256:5668b3173bb0b2e65020b60d83f5910a7224027232c9f5dc05a71a1deac9f960 \
--hash=sha256:56e6a12ec8d7679f41b3750ffa426d22b44ef97be226a9bab00a03365f217b2b \
--hash=sha256:582871902e1902b3c8e9b2c347f32a792a07094110c1bca6c2ea89b90150caac \
--hash=sha256:5c8aa40f6ca803f95b1c1c5aeaee6237b9e879e4dfb46ad713229a63651a95fb \
--hash=sha256:5d813fd871b3d5c3005157622ee102e8908ad6011ec915a18bd8fde673c4360e \
--hash=sha256:5dd0ec5f514ed40e49bf961d49cf1bc2c72e9b50f29a163b2cc9030c6742aa73 \
--hash=sha256:5f3cf3721eaf8741cffaf092487f1ca80831202ce91672776b02b875580e174a \
--hash=sha256:6294907eaaccf71c076abdd1c7954e272efa39bb043161b4b8aa1cd76a16ce43 \
--hash=sha256:64d094ea1aa97c6ded4748d40886076a931a8bf6f61b6e43e4a1041769c39dd2 \
--hash=sha256:6650a7bbe17a2717167e3e23c186849bae5cef35d38949549f1c116031b2b3aa \
--hash=sha256:67b6655311b00581914aba481729971b88bb8bc7996206590700a3ac85e457b8 \
--hash=sha256:6b06c5d4e8701ac2ba99a2ef835e4e1b187d41095a9c619c5b185c9068ed2a49 \
--hash=sha256:6ce883906810b4c3bd90e0ada1f9e808d9ecf1c5f0b60c6b8831d6100bcc7dd6 \
--hash=sha256:6db09153d8438425e98cdc9a289c5fade04a5d2128faff8f227c459da21b9703 \
--hash=sha256:6f80fba4af0cb1d2344869d56430e304a51396b70d46b91a55ed4959993c0589 \
--hash=sha256:743e5811b0c377eb830150d675b0847a74a44d4ad5ab8845923d5b3a756d8100 \
--hash=sha256:753294d42fb072aa1775bfe1a2ba1012427376718fa4c72de52005a3d2a22178 \
--hash=sha256:7568f682c06f10f30ef643a1e8eec4afeecdafde5c4af1b574c6df079e96f96c \
--hash=sha256:7706e15cdbf42f8fab1e6425247dfa98f4a6f8c63746c995d6a2017f78e619ae \
--hash=sha256:785e7f517ebb9890813d31cb5d328fa5eda825bb205065cde760b3150e4de1f7 \
--hash=sha256:7a05c0240f6c711eb381ac392de987ee974fa9336071fb697768dfdb151345ce \
--hash=sha256:7ce7eaf9a98680b4312b7cebcdd9352531c43db00fca586115845df388f3c465 \
--hash=sha256:7ce8e26b86a91e305858e018afc7a6e932f17428b1eaa60154bd1f7ee888b5f8 \
--hash=sha256:7d0324a35ab436c9d768753cbc3c47a865a2cbc0757066cb864747baa61f6ece \
--hash=sha256:7e9b24cca4037a561422bf5dc52b38d390fb61f7bfff64053ce1b72f6938e6b2 \
--hash=sha256:810ca06cca91de9107718dc83d9ac4d2e86efd6c02cba49a190abcaf33fb0472 \
--hash=sha256:820f6ee5c06bc868335e3b6e42d7ef41f50dfb3ea32fbd523ab679d10d8741c0 \
--hash=sha256:82764c0bd697159fe9947ad59b6db6d7329e88505c8f98990eb07e84cc0a5d81 \
--hash=sha256:8ae65fdfb8a841556b52935dfd4c3f79132dc5253b12c0061b96415208f4d622 \
--hash=sha256:8d5b0ff3218858859910295df6953d7bafac3a48d5cd18f4e3ed9999efd2245f \
--hash=sha256:95d6bf449a1ac81de562d65d180af5d8c19672793c81877a2eda8fde5d08f2fd \
--hash=sha256:964c7aa318da542cdcc60d4a648377ffe1a2ef0eb1e996026c7f74507b720a78 \
--hash=sha256:96ef39add33ff58cd4c112cbac076726b96b98bb8f1e7f7595288dcfb2f10b57 \
--hash=sha256:a6612c2a844043e4d10a8324c54cdff0042c558eef30bd705770793d70b224aa \
--hash=sha256:a8031074a397a5925d06b590121f8339d34a5a74cfe6970f8a1124eb8b83f4ac \
--hash=sha256:aab9e522efff3993a9e98ab14263d4e20211e62da088298089a03056980a3e69 \
--hash=sha256:ae579143826c6f05a361d9546446c432a165ecf1c0b720bbfd81152645cb897d \
--hash=sha256:ae90b9e50fe1bd115b24785e962b51130340408156d34d67b5f8f3fa6540938e \
--hash=sha256:b18cf68255a476b927910c6873d9ed00da692bb293c5b10b282bd48a0afe3ae2 \
--hash=sha256:b7efb12e5071ad8d5b547487bdad489fbd4a5a35a0fc36a1941517a6ad7f23e0 \
--hash=sha256:c4d9f15ffe68bcd3898b0ad7233af01b15c57d91cd1667f8d868e0eacbfe3f87 \
--hash=sha256:c53100c8ee5a1e102766abde2158077d8c374bee0639201f11d3032e3555dfbc \
--hash=sha256:c57e493a0faea1e4c38f860d6862ba6832723396c884fbf938ff5e9b224200e2 \
--hash=sha256:c8319e0bd6a7b45ad76166cc3d5d6a36c97d0c82a196f478c3ee5346566eebfd \
--hash=sha256:caffda619099cfd4f63d48462f6aadbecee3ad9603b4b88b60cb821c1b258576 \
--hash=sha256:cc0c316fba3ce72ac3ab7902a888b9dc4979162d320823679da270c2d9ad0cad \
--hash=sha256:cdd02a08205dc90238669f082747612cb3c82bd2c717adc60f9b9ecadb540f80 \
--hash=sha256:d50ac34835c6a4a0d456b5db559b82047403c4317b3bc73b3455fefdbdc54b0a \
--hash=sha256:d6b9dd6aa03c812017411734e496c44fef29b43dba1e3dd1fa7361bbacfc1354 \
--hash=sha256:da3131ef2b940b99106f29dfbc30d9505643f766704e14c5d5e504e6a480c35e \
--hash=sha256:da43cbe593e3c87d07108d0ebd73771dc414488f1f91ed2e204b0370b94b37ac \
--hash=sha256:dd59638025160056687d598b054b64a79183f8065eae0d3f5ca523cde9943940 \
--hash=sha256:e1895e949f8849bc2757c0dbac28422a04be031204df46a56ab34bcf98507342 \
--hash=sha256:e1a79ad49f346aa1a2921f31e8dbbab4d64484823e813a002679eaa46cba39e1 \
--hash=sha256:e460475719721d59cd54a350c1f71c797c763212c836bf48585478c5514d2854 \
--hash=sha256:e64ffaf8f6e17ca15eb48344d86a7a741454526f3a3fa56bc493ad9d7ec63936 \
--hash=sha256:e6e3ccebdbd6e53474b0bb7ab8b88e83c0cfe91484b25e058e581348ee5a01a5 \
--hash=sha256:e758d271ed0286d146cf7c04c539a5169a888dd0b57026be621547e756af55bc \
--hash=sha256:f087879f1ffde024dd2788a30d55acd67959dcf6c431e9d3682d1c491a0eb474 \
--hash=sha256:f477d26183e94eaafc60b983ab25af2a809a1b48ce4debb57b343f671b7a90b6 \
--hash=sha256:fc535cb898ef88333cf317777ecdfe0faac1c2a3187ef7eb061b6f7ecf7e6bae
pydantic==2.9.0 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:c7a8a9fdf7d100afa49647eae340e2d23efa382466a8d177efcd1381e9be5598 \
--hash=sha256:f66a7073abd93214a20c5f7b32d56843137a7a2e70d02111f3be287035c45370
sniffio==1.3.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
-typing-extensions==4.12.2 ; python_version >= "3.8" and python_version < "3.11" \
typing-extensions==4.12.2 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
--hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
tzdata==2024.1 ; python_version >= "3.9" and python_version < "4.0" \
--hash=sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd \
--hash=sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252

tests/test_client.py

@@ -28,9 +28,6 @@ def test_client_chat(httpserver: HTTPServer):
      'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
      'tools': [],
      'stream': False,
-      'format': '',
-      'options': {},
-      'keep_alive': None,
    },
  ).respond_with_json(
    {
@@ -76,9 +73,6 @@ def test_client_chat_stream(httpserver: HTTPServer):
      'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
      'tools': [],
      'stream': True,
-      'format': '',
-      'options': {},
-      'keep_alive': None,
    },
  ).respond_with_handler(stream_handler)
@@ -106,9 +100,6 @@ def test_client_chat_images(httpserver: HTTPServer):
      ],
      'tools': [],
      'stream': False,
-      'format': '',
-      'options': {},
-      'keep_alive': None,
    },
  ).respond_with_json(
    {
@@ -137,16 +128,7 @@ def test_client_generate(httpserver: HTTPServer):
    json={
      'model': 'dummy',
      'prompt': 'Why is the sky blue?',
-      'suffix': '',
-      'system': '',
-      'template': '',
-      'context': [],
      'stream': False,
-      'raw': False,
-      'images': [],
-      'format': '',
-      'options': {},
-      'keep_alive': None,
    },
  ).respond_with_json(
    {
@@ -183,16 +165,7 @@ def test_client_generate_stream(httpserver: HTTPServer):
    json={
      'model': 'dummy',
      'prompt': 'Why is the sky blue?',
-      'suffix': '',
-      'system': '',
-      'template': '',
-      'context': [],
      'stream': True,
-      'raw': False,
-      'images': [],
-      'format': '',
-      'options': {},
-      'keep_alive': None,
    },
  ).respond_with_handler(stream_handler)
@@ -212,16 +185,8 @@ def test_client_generate_images(httpserver: HTTPServer):
    json={
      'model': 'dummy',
      'prompt': 'Why is the sky blue?',
-      'suffix': '',
-      'system': '',
-      'template': '',
-      'context': [],
      'stream': False,
-      'raw': False,
      'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
-      'format': '',
-      'options': {},
-      'keep_alive': None,
    },
  ).respond_with_json(
    {
@@ -244,15 +209,11 @@ def test_client_pull(httpserver: HTTPServer):
    '/api/pull',
    method='POST',
    json={
-      'name': 'dummy',
      'model': 'dummy',
      'insecure': False,
      'stream': False,
    },
-  ).respond_with_json(
-    {
-      'status': 'success',
-    }
-  )
  ).respond_with_json({'status': 'success'})

  client = Client(httpserver.url_for('/'))
  response = client.pull('dummy')
@@ -274,7 +235,7 @@ def test_client_pull_stream(httpserver: HTTPServer):
    '/api/pull',
    method='POST',
    json={
-      'name': 'dummy',
      'model': 'dummy',
      'insecure': False,
      'stream': True,
    },
@@ -293,15 +254,15 @@ def test_client_push(httpserver: HTTPServer):
    '/api/push',
    method='POST',
    json={
-      'name': 'dummy',
      'model': 'dummy',
      'insecure': False,
      'stream': False,
    },
-  ).respond_with_json({})
  ).respond_with_json({'status': 'success'})

  client = Client(httpserver.url_for('/'))
  response = client.push('dummy')
-  assert isinstance(response, dict)
  assert response['status'] == 'success'


def test_client_push_stream(httpserver: HTTPServer):
@@ -317,7 +278,7 @@ def test_client_push_stream(httpserver: HTTPServer):
    '/api/push',
    method='POST',
    json={
-      'name': 'dummy',
      'model': 'dummy',
      'insecure': False,
      'stream': True,
    },
@@ -337,12 +298,11 @@ def test_client_create_path(httpserver: HTTPServer):
    '/api/create',
    method='POST',
    json={
-      'name': 'dummy',
      'model': 'dummy',
      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
      'stream': False,
-      'quantize': None,
    },
-  ).respond_with_json({})
  ).respond_with_json({'status': 'success'})

  client = Client(httpserver.url_for('/'))
@@ -352,7 +312,7 @@ def test_client_create_path(httpserver: HTTPServer):
      modelfile.flush()

      response = client.create('dummy', path=modelfile.name)
-      assert isinstance(response, dict)
      assert response['status'] == 'success'


def test_client_create_path_relative(httpserver: HTTPServer):
@@ -361,12 +321,11 @@ def test_client_create_path_relative(httpserver: HTTPServer):
    '/api/create',
    method='POST',
    json={
-      'name': 'dummy',
      'model': 'dummy',
      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
      'stream': False,
-      'quantize': None,
    },
-  ).respond_with_json({})
  ).respond_with_json({'status': 'success'})

  client = Client(httpserver.url_for('/'))
@@ -376,7 +335,7 @@ def test_client_create_path_relative(httpserver: HTTPServer):
      modelfile.flush()

      response = client.create('dummy', path=modelfile.name)
-      assert isinstance(response, dict)
      assert response['status'] == 'success'


@pytest.fixture
@@ -394,12 +353,11 @@ def test_client_create_path_user_home(httpserver: HTTPServer, userhomedir):
    '/api/create',
    method='POST',
    json={
-      'name': 'dummy',
      'model': 'dummy',
      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
      'stream': False,
-      'quantize': None,
    },
-  ).respond_with_json({})
  ).respond_with_json({'status': 'success'})

  client = Client(httpserver.url_for('/'))
@@ -409,7 +367,7 @@ def test_client_create_path_user_home(httpserver: HTTPServer, userhomedir):
      modelfile.flush()

      response = client.create('dummy', path=modelfile.name)
-      assert isinstance(response, dict)
      assert response['status'] == 'success'


def test_client_create_modelfile(httpserver: HTTPServer):
@@ -418,18 +376,17 @@ def test_client_create_modelfile(httpserver: HTTPServer):
    '/api/create',
    method='POST',
    json={
-      'name': 'dummy',
      'model': 'dummy',
      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
      'stream': False,
-      'quantize': None,
    },
-  ).respond_with_json({})
  ).respond_with_json({'status': 'success'})

  client = Client(httpserver.url_for('/'))

  with tempfile.NamedTemporaryFile() as blob:
    response = client.create('dummy', modelfile=f'FROM {blob.name}')
-    assert isinstance(response, dict)
    assert response['status'] == 'success'


def test_client_create_modelfile_roundtrip(httpserver: HTTPServer):
@@ -438,7 +395,7 @@ def test_client_create_modelfile_roundtrip(httpserver: HTTPServer):
    '/api/create',
    method='POST',
    json={
-      'name': 'dummy',
      'model': 'dummy',
      'modelfile': '''FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
TEMPLATE """[INST] <<SYS>>{{.System}}<</SYS>>
{{.Prompt}} [/INST]"""
@@ -452,9 +409,8 @@ PARAMETER stop [/INST]
PARAMETER stop <<SYS>>
PARAMETER stop <</SYS>>''',
      'stream': False,
-      'quantize': None,
    },
-  ).respond_with_json({})
  ).respond_with_json({'status': 'success'})

  client = Client(httpserver.url_for('/'))
@@ -478,7 +434,7 @@ PARAMETER stop <</SYS>>''',
      ]
    ),
  )
-  assert isinstance(response, dict)
  assert response['status'] == 'success'


def test_client_create_from_library(httpserver: HTTPServer):
@@ -486,17 +442,16 @@ def test_client_create_from_library(httpserver: HTTPServer):
    '/api/create',
    method='POST',
    json={
-      'name': 'dummy',
      'model': 'dummy',
      'modelfile': 'FROM llama2',
      'stream': False,
-      'quantize': None,
    },
-  ).respond_with_json({})
  ).respond_with_json({'status': 'success'})

  client = Client(httpserver.url_for('/'))

  response = client.create('dummy', modelfile='FROM llama2')
-  assert isinstance(response, dict)
  assert response['status'] == 'success'


def test_client_create_blob(httpserver: HTTPServer):
@@ -524,14 +479,14 @@ def test_client_delete(httpserver: HTTPServer):
  httpserver.expect_ordered_request(PrefixPattern('/api/delete'), method='DELETE').respond_with_response(Response(status=200))
  client = Client(httpserver.url_for('/api/delete'))
  response = client.delete('dummy')
-  assert response == {'status': 'success'}
  assert response['status'] == 'success'


def test_client_copy(httpserver: HTTPServer):
  httpserver.expect_ordered_request(PrefixPattern('/api/copy'), method='POST').respond_with_response(Response(status=200))
  client = Client(httpserver.url_for('/api/copy'))
  response = client.copy('dum', 'dummer')
-  assert response == {'status': 'success'}
  assert response['status'] == 'success'


@pytest.mark.asyncio
@@ -544,15 +499,22 @@ async def test_async_client_chat(httpserver: HTTPServer):
      'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
      'tools': [],
      'stream': False,
-      'format': '',
-      'options': {},
-      'keep_alive': None,
    },
-  ).respond_with_json({})
  ).respond_with_json(
    {
      'model': 'dummy',
      'message': {
        'role': 'assistant',
        'content': "I don't know.",
      },
    }
  )

  client = AsyncClient(httpserver.url_for('/'))
  response = await client.chat('dummy', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}])
-  assert isinstance(response, dict)
  assert response['model'] == 'dummy'
  assert response['message']['role'] == 'assistant'
  assert response['message']['content'] == "I don't know."


@pytest.mark.asyncio
@@ -583,9 +545,6 @@ async def test_async_client_chat_stream(httpserver: HTTPServer):
      'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
      'tools': [],
      'stream': True,
-      'format': '',
-      'options': {},
-      'keep_alive': None,
    },
  ).respond_with_handler(stream_handler)
@@ -614,18 +573,25 @@ async def test_async_client_chat_images(httpserver: HTTPServer):
      ],
      'tools': [],
      'stream': False,
-      'format': '',
-      'options': {},
-      'keep_alive': None,
    },
-  ).respond_with_json({})
  ).respond_with_json(
    {
      'model': 'dummy',
      'message': {
        'role': 'assistant',
        'content': "I don't know.",
      },
    }
  )

  client = AsyncClient(httpserver.url_for('/'))

  with io.BytesIO() as b:
    Image.new('RGB', (1, 1)).save(b, 'PNG')
    response = await client.chat('dummy', messages=[{'role': 'user', 'content': 'Why is the sky blue?', 'images': [b.getvalue()]}])
-    assert isinstance(response, dict)
    assert response['model'] == 'dummy'
    assert response['message']['role'] == 'assistant'
    assert response['message']['content'] == "I don't know."


@pytest.mark.asyncio
@@ -636,22 +602,19 @@ async def test_async_client_generate(httpserver: HTTPServer):
    json={
      'model': 'dummy',
      'prompt': 'Why is the sky blue?',
-      'suffix': '',
-      'system': '',
-      'template': '',
-      'context': [],
      'stream': False,
-      'raw': False,
-      'images': [],
-      'format': '',
-      'options': {},
-      'keep_alive': None,
    },
-  ).respond_with_json({})
  ).respond_with_json(
    {
      'model': 'dummy',
      'response': 'Because it is.',
    }
  )

  client = AsyncClient(httpserver.url_for('/'))
  response = await client.generate('dummy', 'Why is the sky blue?')
-  assert isinstance(response, dict)
  assert response['model'] == 'dummy'
  assert response['response'] == 'Because it is.'


@pytest.mark.asyncio
@@ -677,16 +640,7 @@ async def test_async_client_generate_stream(httpserver: HTTPServer):
     json={
       'model': 'dummy',
       'prompt': 'Why is the sky blue?',
-      'suffix': '',
-      'system': '',
-      'template': '',
-      'context': [],
       'stream': True,
-      'raw': False,
-      'images': [],
-      'format': '',
-      'options': {},
-      'keep_alive': None,
     },
   ).respond_with_handler(stream_handler)
@@ -707,25 +661,23 @@ async def test_async_client_generate_images(httpserver: HTTPServer):
     json={
       'model': 'dummy',
       'prompt': 'Why is the sky blue?',
-      'suffix': '',
-      'system': '',
-      'template': '',
-      'context': [],
       'stream': False,
-      'raw': False,
       'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
-      'format': '',
-      'options': {},
-      'keep_alive': None,
     },
-  ).respond_with_json({})
+  ).respond_with_json(
+    {
+      'model': 'dummy',
+      'response': 'Because it is.',
+    }
+  )
   client = AsyncClient(httpserver.url_for('/'))
   with tempfile.NamedTemporaryFile() as temp:
     Image.new('RGB', (1, 1)).save(temp, 'PNG')
     response = await client.generate('dummy', 'Why is the sky blue?', images=[temp.name])
-    assert isinstance(response, dict)
+    assert response['model'] == 'dummy'
+    assert response['response'] == 'Because it is.'
 @pytest.mark.asyncio
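Editor's note: the expected request above shows that an image given as a file path is sent base64-encoded, so the client appears to read and encode path arguments. A sketch under that assumption, with placeholder server and model names:

import asyncio
import tempfile

from PIL import Image
from ollama import AsyncClient


async def main() -> None:
  client = AsyncClient('http://localhost:11434')  # assumed local server
  with tempfile.NamedTemporaryFile() as temp:
    Image.new('RGB', (1, 1)).save(temp, 'PNG')
    temp.flush()
    # Passing a file path; the encoded bytes end up in the request body.
    response = await client.generate('llava', 'Describe this image.', images=[temp.name])
  print(response['response'])


asyncio.run(main())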
@@ -734,15 +686,15 @@ async def test_async_client_pull(httpserver: HTTPServer):
     '/api/pull',
     method='POST',
     json={
-      'name': 'dummy',
+      'model': 'dummy',
       'insecure': False,
       'stream': False,
     },
-  ).respond_with_json({})
+  ).respond_with_json({'status': 'success'})
   client = AsyncClient(httpserver.url_for('/'))
   response = await client.pull('dummy')
-  assert isinstance(response, dict)
+  assert response['status'] == 'success'
 @pytest.mark.asyncio
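Editor's note: the pull request body now carries a 'model' key instead of 'name'. A usage sketch of the non-streaming call, with placeholder server and model names:

import asyncio

from ollama import AsyncClient


async def main() -> None:
  client = AsyncClient('http://localhost:11434')  # assumed local server
  response = await client.pull('llama2')
  # A non-streaming pull resolves to a final status payload, as the test asserts.
  assert response['status'] == 'success'


asyncio.run(main())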
@@ -761,7 +713,7 @@ async def test_async_client_pull_stream(httpserver: HTTPServer):
     '/api/pull',
     method='POST',
     json={
-      'name': 'dummy',
+      'model': 'dummy',
       'insecure': False,
       'stream': True,
     },
@@ -781,15 +733,15 @@ async def test_async_client_push(httpserver: HTTPServer):
     '/api/push',
     method='POST',
     json={
-      'name': 'dummy',
+      'model': 'dummy',
       'insecure': False,
       'stream': False,
     },
-  ).respond_with_json({})
+  ).respond_with_json({'status': 'success'})
   client = AsyncClient(httpserver.url_for('/'))
   response = await client.push('dummy')
-  assert isinstance(response, dict)
+  assert response['status'] == 'success'
 @pytest.mark.asyncio
@@ -806,7 +758,7 @@ async def test_async_client_push_stream(httpserver: HTTPServer):
     '/api/push',
     method='POST',
     json={
-      'name': 'dummy',
+      'model': 'dummy',
       'insecure': False,
       'stream': True,
     },
@@ -827,12 +779,11 @@ async def test_async_client_create_path(httpserver: HTTPServer):
     '/api/create',
     method='POST',
     json={
-      'name': 'dummy',
+      'model': 'dummy',
       'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
       'stream': False,
-      'quantize': None,
     },
-  ).respond_with_json({})
+  ).respond_with_json({'status': 'success'})
   client = AsyncClient(httpserver.url_for('/'))
@@ -842,7 +793,7 @@ async def test_async_client_create_path(httpserver: HTTPServer):
     modelfile.flush()
     response = await client.create('dummy', path=modelfile.name)
-    assert isinstance(response, dict)
+    assert response['status'] == 'success'
 @pytest.mark.asyncio
@@ -852,12 +803,11 @@ async def test_async_client_create_path_relative(httpserver: HTTPServer):
     '/api/create',
     method='POST',
     json={
-      'name': 'dummy',
+      'model': 'dummy',
      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
       'stream': False,
-      'quantize': None,
     },
-  ).respond_with_json({})
+  ).respond_with_json({'status': 'success'})
   client = AsyncClient(httpserver.url_for('/'))
@@ -867,7 +817,7 @@ async def test_async_client_create_path_relative(httpserver: HTTPServer):
     modelfile.flush()
     response = await client.create('dummy', path=modelfile.name)
-    assert isinstance(response, dict)
+    assert response['status'] == 'success'
 @pytest.mark.asyncio
@@ -877,12 +827,11 @@ async def test_async_client_create_path_user_home(httpserver: HTTPServer, userho
     '/api/create',
     method='POST',
     json={
-      'name': 'dummy',
+      'model': 'dummy',
       'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
       'stream': False,
-      'quantize': None,
     },
-  ).respond_with_json({})
+  ).respond_with_json({'status': 'success'})
   client = AsyncClient(httpserver.url_for('/'))
@@ -892,7 +841,7 @@ async def test_async_client_create_path_user_home(httpserver: HTTPServer, userho
     modelfile.flush()
     response = await client.create('dummy', path=modelfile.name)
-    assert isinstance(response, dict)
+    assert response['status'] == 'success'
 @pytest.mark.asyncio
@@ -902,18 +851,17 @@ async def test_async_client_create_modelfile(httpserver: HTTPServer):
     '/api/create',
     method='POST',
     json={
-      'name': 'dummy',
+      'model': 'dummy',
       'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
       'stream': False,
-      'quantize': None,
     },
-  ).respond_with_json({})
+  ).respond_with_json({'status': 'success'})
   client = AsyncClient(httpserver.url_for('/'))
   with tempfile.NamedTemporaryFile() as blob:
     response = await client.create('dummy', modelfile=f'FROM {blob.name}')
-    assert isinstance(response, dict)
+    assert response['status'] == 'success'
 @pytest.mark.asyncio
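Editor's note: the tests above show local FROM paths in a Modelfile being rewritten to @sha256 blob references in the expected request, so the client seems to resolve local files before issuing the create call. A sketch of creating a model from an in-memory Modelfile, with placeholder names and an assumed local server:

import asyncio

from ollama import AsyncClient


async def main() -> None:
  client = AsyncClient('http://localhost:11434')  # assumed local server
  # Placeholder model and Modelfile; 'llama2' must already exist locally.
  response = await client.create('mini-knight', modelfile='FROM llama2\nSYSTEM You are a helpful knight.')
  assert response['status'] == 'success'


asyncio.run(main())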
@@ -923,7 +871,7 @@ async def test_async_client_create_modelfile_roundtrip(httpserver: HTTPServer):
     '/api/create',
     method='POST',
     json={
-      'name': 'dummy',
+      'model': 'dummy',
       'modelfile': '''FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
 TEMPLATE """[INST] <<SYS>>{{.System}}<</SYS>>
 {{.Prompt}} [/INST]"""
@@ -937,9 +885,8 @@ PARAMETER stop [/INST]
 PARAMETER stop <<SYS>>
 PARAMETER stop <</SYS>>''',
       'stream': False,
-      'quantize': None,
     },
-  ).respond_with_json({})
+  ).respond_with_json({'status': 'success'})
   client = AsyncClient(httpserver.url_for('/'))
@@ -963,7 +910,7 @@ PARAMETER stop <</SYS>>''',
       ]
     ),
   )
-  assert isinstance(response, dict)
+  assert response['status'] == 'success'
 @pytest.mark.asyncio
@@ -972,17 +919,16 @@ async def test_async_client_create_from_library(httpserver: HTTPServer):
     '/api/create',
     method='POST',
     json={
-      'name': 'dummy',
+      'model': 'dummy',
       'modelfile': 'FROM llama2',
       'stream': False,
-      'quantize': None,
     },
-  ).respond_with_json({})
+  ).respond_with_json({'status': 'success'})
   client = AsyncClient(httpserver.url_for('/'))
   response = await client.create('dummy', modelfile='FROM llama2')
-  assert isinstance(response, dict)
+  assert response['status'] == 'success'
 @pytest.mark.asyncio
@@ -1013,7 +959,7 @@ async def test_async_client_delete(httpserver: HTTPServer):
   httpserver.expect_ordered_request(PrefixPattern('/api/delete'), method='DELETE').respond_with_response(Response(status=200))
   client = AsyncClient(httpserver.url_for('/api/delete'))
   response = await client.delete('dummy')
-  assert response == {'status': 'success'}
+  assert response['status'] == 'success'
 @pytest.mark.asyncio
@@ -1021,4 +967,4 @@ async def test_async_client_copy(httpserver: HTTPServer):
   httpserver.expect_ordered_request(PrefixPattern('/api/copy'), method='POST').respond_with_response(Response(status=200))
   client = AsyncClient(httpserver.url_for('/api/copy'))
   response = await client.copy('dum', 'dummer')
-  assert response == {'status': 'success'}
+  assert response['status'] == 'success'
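Editor's note: the last two hunks switch from comparing whole dicts to checking a single field, matching the typed responses introduced by this commit. A sketch of those maintenance calls, with placeholder model names and an assumed local server:

import asyncio

from ollama import AsyncClient


async def main() -> None:
  client = AsyncClient('http://localhost:11434')  # assumed local server
  copy_response = await client.copy('llama2', 'llama2-backup')
  delete_response = await client.delete('llama2-backup')
  # Both endpoints report success through a status field.
  assert copy_response['status'] == 'success'
  assert delete_response['status'] == 'success'


asyncio.run(main())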