Mirror of https://github.com/ollama/ollama-python.git, synced 2026-01-13 21:57:16 +08:00
Simple Example (#179)
parent d25c4aa1cf
commit 982d65fea0
examples/ps/main.py (new file, 31 lines)
@@ -0,0 +1,31 @@
from ollama import ps, pull, chat

response = pull('mistral', stream=True)
progress_states = set()
for progress in response:
  if progress.get('status') in progress_states:
    continue
  progress_states.add(progress.get('status'))
  print(progress.get('status'))

print('\n')

response = chat('mistral', messages=[{'role': 'user', 'content': 'Hello!'}])
print(response['message']['content'])

print('\n')

response = ps()

name = response['models'][0]['name']
size = response['models'][0]['size']
size_vram = response['models'][0]['size_vram']

if size == size_vram:
  print(f'{name}: 100% GPU')
elif not size_vram:
  print(f'{name}: 100% CPU')
else:
  size_cpu = size - size_vram
  cpu_percent = round(size_cpu / size * 100)
  print(f'{name}: {cpu_percent}% CPU/{100 - cpu_percent}% GPU')
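The final block is plain proportional arithmetic on the byte counts returned by ps(). As a standalone sketch with made-up sizes (real values come from the ps() response), the split works out like this:

# Hypothetical byte counts for illustration only; the example reads them from ps().
size = 7 * 1024**3       # total model size: 7 GiB
size_vram = 5 * 1024**3  # portion currently resident in VRAM: 5 GiB

size_cpu = size - size_vram                  # 2 GiB left in system memory
cpu_percent = round(size_cpu / size * 100)   # round(2 / 7 * 100) -> 29
print(f'{cpu_percent}% CPU/{100 - cpu_percent}% GPU')  # prints: 29% CPU/71% GPU

Note that the example first pulls the mistral model, so running it against a local Ollama server will download the model weights on the first run before the chat and ps() calls execute.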