Merge a343614ff2 into dbccf192ac

This commit is contained in:
commit ffd6aaeca9
@@ -572,7 +572,7 @@ class ShowResponse(SubscriptableBaseModel):
   details: Optional[ModelDetails] = None
 
-  modelinfo: Optional[Mapping[str, Any]] = Field(alias='model_info')
+  modelinfo: Optional[Mapping[str, Any]] = Field(default=None, alias='model_info')
 
   parameters: Optional[str] = None
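
Why the one-line change above resolves the ValidationError: in pydantic v2 an Optional annotation by itself no longer supplies a default, so Field(alias='model_info') without a default is still a required field. A minimal standalone sketch of the difference, assuming pydantic v2 is installed; the Before/After model names are illustrative and not part of the library:

from typing import Any, Mapping, Optional

from pydantic import BaseModel, Field, ValidationError


class Before(BaseModel):
  # Optional[...] alone does not give the field a default in pydantic v2,
  # so 'model_info' is still required during validation.
  modelinfo: Optional[Mapping[str, Any]] = Field(alias='model_info')


class After(BaseModel):
  # default=None makes the field truly optional, matching the change above.
  modelinfo: Optional[Mapping[str, Any]] = Field(default=None, alias='model_info')


try:
  Before.model_validate({})
except ValidationError as exc:
  # pydantic reports the aliased field as missing when the input omits it
  print(exc.errors()[0]['type'])

print(After.model_validate({}).modelinfo)  # None
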
@@ -4,7 +4,7 @@ from pathlib import Path
 
 import pytest
 
-from ollama._types import CreateRequest, Image
+from ollama._types import CreateRequest, Image, ShowResponse
 
 
 def test_image_serialization_bytes():
@@ -92,3 +92,59 @@ def test_create_request_serialization_license_list():
   request = CreateRequest(model='test-model', license=['MIT', 'Apache-2.0'])
   serialized = request.model_dump()
   assert serialized['license'] == ['MIT', 'Apache-2.0']
+
+
+def test_show_response_without_model_info():
+  """
+  Test that ShowResponse can be created without the model_info field.
+
+  This is a regression test for issue #607 where certain cloud models
+  (e.g., glm-4.7:cloud, qwen3-next:80b-cloud, deepseek-v3.2:cloud) return
+  responses without the model_info field, causing a ValidationError.
+  """
+  # Response data without the model_info field (as returned by some cloud models)
+  response_data = {
+    'modelfile': '# Modelfile generated by "ollama show"',
+    'template': '{{ .Prompt }}',
+    'details': {
+      'parent_model': '',
+      'format': 'gguf',
+      'family': 'glm',
+      'families': ['glm'],
+      'parameter_size': '9.4B',
+      'quantization_level': 'Q4_K_M',
+    },
+    'capabilities': ['completion'],
+    'modified_at': '2025-01-01T00:00:00Z',
+  }
+
+  # This should not raise a ValidationError
+  response = ShowResponse.model_validate(response_data)
+
+  assert response.modelfile == '# Modelfile generated by "ollama show"'
+  assert response.template == '{{ .Prompt }}'
+  assert response.modelinfo is None  # model_info was not provided
+  assert response.capabilities == ['completion']
+
+
+def test_show_response_with_model_info():
+  """
+  Test that ShowResponse still works correctly when model_info is provided.
+  """
+  response_data = {
+    'modelfile': '# Modelfile',
+    'template': '{{ .Prompt }}',
+    'model_info': {
+      'general.architecture': 'llama',
+      'general.parameter_count': 7000000000,
+    },
+    'capabilities': ['completion'],
+    'modified_at': '2025-01-01T00:00:00Z',
+  }
+
+  response = ShowResponse.model_validate(response_data)
+
+  assert response.modelfile == '# Modelfile'
+  assert response.modelinfo is not None
+  assert response.modelinfo['general.architecture'] == 'llama'
+  assert response.modelinfo['general.parameter_count'] == 7000000000
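
For context, a sketch of how the fix surfaces through the client API. This assumes a reachable Ollama server and that the cloud model named in the test docstring is available; both are assumptions, not part of this diff:

from ollama import Client

client = Client()

# Before the fix, show() on a model whose response omits model_info raised a
# pydantic ValidationError; with default=None it simply leaves modelinfo unset.
info = client.show('glm-4.7:cloud')
print(info.modelinfo)  # None when the server response has no model_info
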