Mirror of https://github.com/ollama/ollama-python.git, synced 2026-01-14 06:07:17 +08:00.
* Examples and README updates --------- Co-authored-by: fujitatomoya <tomoya.fujita825@gmail.com> Co-authored-by: Michael Yang <mxyng@pm.me>
28 lines
783 B
Python
28 lines
783 B
Python
from ollama import ps, pull, chat
|
|
from ollama import ProcessResponse
|
|
|
|
# Ensure at least one model is available locally by pulling it.
# The streaming pull yields many progress events; we report each
# distinct download status exactly once, in the order it first appears.
response = pull('llama3.2', stream=True)

seen_statuses = set()
for progress in response:
    status = progress.get('status')
    if status not in seen_statuses:
        seen_statuses.add(status)
        print(status)

print('\n')
|
# Issue a single chat request so the server loads the model into memory;
# the reply itself is discarded — we only care about the load side effect.
print('Waiting for model to load... \n')
warmup_messages = [{'role': 'user', 'content': 'Why is the sky blue?'}]
chat(model='llama3.2', messages=warmup_messages)
|
# Query the server for currently-loaded models and print a short
# summary of each one (digest, expiry, memory footprint, details).
response: ProcessResponse = ps()
for loaded in response.models:
    fields = [
        ('Model: ', loaded.model),
        ('  Digest: ', loaded.digest),
        ('  Expires at: ', loaded.expires_at),
        ('  Size: ', loaded.size),
        ('  Size vram: ', loaded.size_vram),
        ('  Details: ', loaded.details),
    ]
    for label, value in fields:
        print(label, value)
    print('\n')