chore: bump ruff and ensure imports are sorted (#385)

* chore: upgrade ruff & sort imports
Aarni Koskela 2025-01-15 02:34:16 +02:00 committed by GitHub
parent 02495ffd77
commit 89e719ab92
25 changed files with 99 additions and 91 deletions
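
In short: ruff moves from 0.7.4 to 0.9.1 and the isort-style "I" rule (plus RUF022 for __all__) is switched on, so imports across the examples, the ollama package, and the tests are re-grouped and alphabetized. A rough before/after sketch of what the auto-fix does (illustrative snippet, not a file from this diff):

# Before: mixed ordering and duplicate from-imports of the same module
from ollama import chat
from ollama import ChatResponse
import asyncio

# After `ruff check --fix`: standard library first, groups separated,
# and from-imports of the same module merged and alphabetized
import asyncio

from ollama import ChatResponse, chat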

@@ -1,4 +1,5 @@
 import asyncio
 from ollama import AsyncClient

@@ -1,4 +1,5 @@
 import asyncio
 import ollama

@@ -1,7 +1,9 @@
-from pydantic import BaseModel
-from ollama import AsyncClient
 import asyncio
+from pydantic import BaseModel
+from ollama import AsyncClient
 # Define the schema for the response
 class FriendInfo(BaseModel):

@@ -1,6 +1,7 @@
 import asyncio
-from ollama import ChatResponse
 import ollama
+from ollama import ChatResponse
 def add_two_numbers(a: int, b: int) -> int:

@@ -1,6 +1,5 @@
 from ollama import chat
 messages = [
   {
     'role': 'user',

@@ -1,6 +1,5 @@
 from ollama import chat
 messages = [
   {
     'role': 'user',

@@ -1,5 +1,4 @@
 from ollama import generate
 for part in generate('llama3.2', 'Why is the sky blue?', stream=True):
   print(part['response'], end='', flush=True)

@@ -1,5 +1,4 @@
 from ollama import generate
 response = generate('llama3.2', 'Why is the sky blue?')
 print(response['response'])

@@ -1,5 +1,4 @@
-from ollama import list
-from ollama import ListResponse
+from ollama import ListResponse, list
 response: ListResponse = list()

@@ -1,4 +1,5 @@
 from ollama import chat
 # from pathlib import Path
 # Pass in the path to the image

@@ -1,10 +1,10 @@
-import sys
 import random
+import sys
 import httpx
 from ollama import generate
 latest = httpx.get('https://xkcd.com/info.0.json')
 latest.raise_for_status()

@@ -1,5 +1,4 @@
-from ollama import ps, pull, chat
-from ollama import ProcessResponse
+from ollama import ProcessResponse, chat, ps, pull
 # Ensure at least one model is loaded
 response = pull('llama3.2', stream=True)

@@ -1,6 +1,6 @@
 from tqdm import tqdm
-from ollama import pull
+from ollama import pull
 current_digest, bars = '', {}
 for progress in pull('llama3.2', stream=True):

@@ -1,6 +1,8 @@
 from pathlib import Path
-from pydantic import BaseModel
 from typing import Literal
+from pydantic import BaseModel
 from ollama import chat

@@ -1,6 +1,7 @@
-from ollama import chat
 from pydantic import BaseModel
+from ollama import chat
 # Define the schema for the response
 class FriendInfo(BaseModel):

@@ -1,5 +1,4 @@
-from ollama import chat
-from ollama import ChatResponse
+from ollama import ChatResponse, chat
 def add_two_numbers(a: int, b: int) -> int:

@@ -1,40 +1,40 @@
-from ollama._client import Client, AsyncClient
+from ollama._client import AsyncClient, Client
 from ollama._types import (
-  Options,
-  Message,
-  Image,
-  Tool,
-  GenerateResponse,
   ChatResponse,
+  EmbedResponse,
   EmbeddingsResponse,
-  StatusResponse,
-  ProgressResponse,
-  EmbedResponse,
+  GenerateResponse,
+  Image,
   ListResponse,
-  ShowResponse,
+  Message,
+  Options,
   ProcessResponse,
+  ProgressResponse,
   RequestError,
   ResponseError,
+  ShowResponse,
+  StatusResponse,
+  Tool,
 )
 __all__ = [
-  'Client',
   'AsyncClient',
-  'Options',
-  'Message',
-  'Image',
-  'Tool',
-  'GenerateResponse',
   'ChatResponse',
+  'Client',
   'EmbedResponse',
   'EmbeddingsResponse',
-  'StatusResponse',
-  'ProgressResponse',
+  'GenerateResponse',
+  'Image',
   'ListResponse',
-  'ShowResponse',
+  'Message',
+  'Options',
   'ProcessResponse',
+  'ProgressResponse',
   'RequestError',
   'ResponseError',
+  'ShowResponse',
+  'StatusResponse',
+  'Tool',
 ]
 _client = Client()

@@ -1,15 +1,17 @@
-import os
-import json
-import platform
 import ipaddress
+import json
+import os
+import platform
+import sys
 import urllib.parse
+from hashlib import sha256
 from os import PathLike
 from pathlib import Path
-from hashlib import sha256
 from typing import (
   Any,
   Callable,
+  Dict,
+  List,
   Literal,
   Mapping,
   Optional,
@@ -18,21 +20,16 @@ from typing import (
   TypeVar,
   Union,
   overload,
-  Dict,
-  List,
 )
-import sys
 from pydantic.json_schema import JsonSchemaValue
 from ollama._utils import convert_function_to_tool
 if sys.version_info < (3, 9):
-  from typing import Iterator, AsyncIterator
+  from typing import AsyncIterator, Iterator
 else:
-  from collections.abc import Iterator, AsyncIterator
+  from collections.abc import AsyncIterator, Iterator
 from importlib import metadata
@@ -46,13 +43,13 @@ import httpx
 from ollama._types import (
   ChatRequest,
   ChatResponse,
-  CreateRequest,
   CopyRequest,
+  CreateRequest,
   DeleteRequest,
+  EmbedRequest,
+  EmbedResponse,
   EmbeddingsRequest,
   EmbeddingsResponse,
-  EmbedRequest,
-  EmbedResponse,
   GenerateRequest,
   GenerateResponse,
   Image,
@@ -70,7 +67,6 @@ from ollama._types import (
   Tool,
 )
 T = TypeVar('T')

@@ -1,11 +1,8 @@
 import json
 from base64 import b64decode, b64encode
-from pathlib import Path
 from datetime import datetime
-from typing import Any, Mapping, Optional, Union, Sequence, Dict, List
-from pydantic.json_schema import JsonSchemaValue
-from typing_extensions import Annotated, Literal
+from pathlib import Path
+from typing import Any, Dict, List, Mapping, Optional, Sequence, Union
 from pydantic import (
   BaseModel,
@@ -14,6 +11,8 @@ from pydantic import (
   Field,
   model_serializer,
 )
+from pydantic.json_schema import JsonSchemaValue
+from typing_extensions import Annotated, Literal
 class SubscriptableBaseModel(BaseModel):

@@ -1,10 +1,12 @@
 from __future__ import annotations
-from collections import defaultdict
 import inspect
-from typing import Callable, Union
 import re
+from collections import defaultdict
+from typing import Callable, Union
 import pydantic
 from ollama._types import Tool

poetry.lock

@@ -556,30 +556,30 @@ Werkzeug = ">=2.0.0"
 [[package]]
 name = "ruff"
-version = "0.7.4"
+version = "0.9.1"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 optional = false
 python-versions = ">=3.7"
 groups = ["dev"]
 files = [
-    {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"},
-    {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"},
-    {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"},
-    {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"},
-    {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"},
-    {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"},
-    {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"},
+    {file = "ruff-0.9.1-py3-none-linux_armv6l.whl", hash = "sha256:84330dda7abcc270e6055551aca93fdde1b0685fc4fd358f26410f9349cf1743"},
+    {file = "ruff-0.9.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3cae39ba5d137054b0e5b472aee3b78a7c884e61591b100aeb544bcd1fc38d4f"},
+    {file = "ruff-0.9.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:50c647ff96f4ba288db0ad87048257753733763b409b2faf2ea78b45c8bb7fcb"},
+    {file = "ruff-0.9.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0c8b149e9c7353cace7d698e1656ffcf1e36e50f8ea3b5d5f7f87ff9986a7ca"},
+    {file = "ruff-0.9.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:beb3298604540c884d8b282fe7625651378e1986c25df51dec5b2f60cafc31ce"},
+    {file = "ruff-0.9.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39d0174ccc45c439093971cc06ed3ac4dc545f5e8bdacf9f067adf879544d969"},
+    {file = "ruff-0.9.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:69572926c0f0c9912288915214ca9b2809525ea263603370b9e00bed2ba56dbd"},
+    {file = "ruff-0.9.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:937267afce0c9170d6d29f01fcd1f4378172dec6760a9f4dface48cdabf9610a"},
+    {file = "ruff-0.9.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:186c2313de946f2c22bdf5954b8dd083e124bcfb685732cfb0beae0c47233d9b"},
+    {file = "ruff-0.9.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f94942a3bb767675d9a051867c036655fe9f6c8a491539156a6f7e6b5f31831"},
+    {file = "ruff-0.9.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:728d791b769cc28c05f12c280f99e8896932e9833fef1dd8756a6af2261fd1ab"},
+    {file = "ruff-0.9.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2f312c86fb40c5c02b44a29a750ee3b21002bd813b5233facdaf63a51d9a85e1"},
+    {file = "ruff-0.9.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ae017c3a29bee341ba584f3823f805abbe5fe9cd97f87ed07ecbf533c4c88366"},
+    {file = "ruff-0.9.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5dc40a378a0e21b4cfe2b8a0f1812a6572fc7b230ef12cd9fac9161aa91d807f"},
+    {file = "ruff-0.9.1-py3-none-win32.whl", hash = "sha256:46ebf5cc106cf7e7378ca3c28ce4293b61b449cd121b98699be727d40b79ba72"},
+    {file = "ruff-0.9.1-py3-none-win_amd64.whl", hash = "sha256:342a824b46ddbcdddd3abfbb332fa7fcaac5488bf18073e841236aadf4ad5c19"},
+    {file = "ruff-0.9.1-py3-none-win_arm64.whl", hash = "sha256:1cd76c7f9c679e6e8f2af8f778367dca82b95009bc7b1a85a47f1521ae524fa7"},
+    {file = "ruff-0.9.1.tar.gz", hash = "sha256:fd2b25ecaf907d6458fa842675382c8597b3c746a2dde6717fe3415425df0c17"},
 ]
 [[package]]
@@ -640,4 +640,4 @@ watchdog = ["watchdog (>=2.3)"]
 [metadata]
 lock-version = "2.1"
 python-versions = "^3.8"
-content-hash = "8e93767305535b0a02f0d724edf1249fd928ff1021644eb9dc26dbfa191f6971"
+content-hash = "7562d45c19103788a43209a67ab1d0a9c028f9495c0ac25c1b63d93de325827e"

@@ -21,7 +21,7 @@ pytest = ">=7.4.3,<9.0.0"
 pytest-asyncio = ">=0.23.2,<0.25.0"
 pytest-cov = ">=4.1,<6.0"
 pytest-httpserver = "^1.0.8"
-ruff = ">=0.1.8,<0.8.0"
+ruff = ">=0.9.1,<0.10.0"
 [build-system]
 requires = ["poetry-core"]
@@ -36,8 +36,16 @@ quote-style = "single"
 indent-style = "space"
 [tool.ruff.lint]
-select = ["E", "F", "B"]
-ignore = ["E501"]
+select = [
+  "E", # pycodestyle errors
+  "F", # pyflakes
+  "B", # bugbear (likely bugs)
+  "I", # sort imports
+  "RUF022", # sort __all__
+]
+ignore = [
+  "E501", # line too long
+]
 [tool.pytest.ini_options]
 addopts = '--doctest-modules --ignore examples'
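
For reference, "I" enables ruff's isort-style import sorting and RUF022 keeps __all__ alphabetized; the E/F/B selections and the E501 ignore were already in place. A minimal sketch of the layout the "I" rule enforces in this repository (imports chosen only for illustration): standard library first, then third-party, then the first-party ollama package, with plain imports ahead of from-imports inside each group:

import json  # standard library group
from pathlib import Path

import httpx  # third-party group
from pydantic import BaseModel

from ollama import AsyncClient  # first-party group (this package)
from ollama._types import ChatResponse

__all__ = ['AsyncClient', 'ChatResponse']  # RUF022: __all__ stays alphabetized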

@@ -1,14 +1,15 @@
 import base64
-import os
 import json
-from pydantic import ValidationError, BaseModel
-import pytest
+import os
 import tempfile
 from pathlib import Path
+import pytest
+from pydantic import BaseModel, ValidationError
 from pytest_httpserver import HTTPServer, URIPattern
 from werkzeug.wrappers import Request, Response
-from ollama._client import Client, AsyncClient, _copy_tools
+from ollama._client import AsyncClient, Client, _copy_tools
 PNG_BASE64 = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'
 PNG_BYTES = base64.b64decode(PNG_BASE64)
@@ -91,7 +92,7 @@ def test_client_chat_stream(httpserver: HTTPServer):
 @pytest.mark.parametrize('message_format', ('dict', 'pydantic_model'))
 @pytest.mark.parametrize('file_style', ('path', 'bytes'))
 def test_client_chat_images(httpserver: HTTPServer, message_format: str, file_style: str, tmp_path):
-  from ollama._types import Message, Image
+  from ollama._types import Image, Message
   httpserver.expect_ordered_request(
     '/api/chat',

@@ -1,9 +1,10 @@
+import tempfile
 from base64 import b64encode
 from pathlib import Path
 import pytest
 from ollama._types import CreateRequest, Image
-import tempfile
 def test_image_serialization_bytes():

@@ -2,7 +2,6 @@ import json
 import sys
 from typing import Dict, List, Mapping, Sequence, Set, Tuple, Union
 from ollama._utils import convert_function_to_tool
@@ -118,7 +117,7 @@ def test_function_with_all_types():
 def test_function_docstring_parsing():
-  from typing import List, Dict, Any
+  from typing import Any, Dict, List
   def func_with_complex_docs(x: int, y: List[str]) -> Dict[str, Any]:
     """