Delete extra async examples

This commit is contained in:
ParthSareen 2024-11-20 14:34:09 -08:00
parent 6fa242894f
commit 92a1cd92e3
3 changed files with 0 additions and 62 deletions

View File

@ -33,7 +33,6 @@ python3 <example>.py
### Ollama List - List all downloaded models and their properties
- [list.py](list.py)
- [async-list.py](async-list.py)
### Ollama ps - Show model status with CPU/GPU usage

View File

@ -1,21 +0,0 @@
import asyncio
import ollama


async def main():
    """List downloaded models and print the properties of each one."""
    client = ollama.AsyncClient()
    listing = await client.list()
    for m in listing.models:
        # Skip entries that carry no detail record.
        if not m.details:
            continue
        print(f'Name: {m.model}')
        print(f'Size (MB): {(m.size.real / 1024 / 1024):.2f}')
        print(f'Format: {m.details.format}')
        print(f'Family: {m.details.family}')
        print(f'Parameter Size: {m.details.parameter_size}')
        print(f'Quantization Level: {m.details.quantization_level}')
        print('-' * 50)


if __name__ == '__main__':
    asyncio.run(main())

View File

@ -1,40 +0,0 @@
import asyncio

from ollama import AsyncClient


async def main():
    """Pull a model, chat with it once, then report its CPU/GPU split."""
    client = AsyncClient()

    # Pull the model, printing each distinct status line only once.
    stream = await client.pull('llama3.1', stream=True)
    seen_statuses = set()
    async for update in stream:
        status = update.get('status')
        if status in seen_statuses:
            continue
        seen_statuses.add(status)
        print(status)
    print('\n')

    # Single chat round-trip.
    reply = await client.chat('llama3.1', messages=[{'role': 'user', 'content': 'Hello!'}])
    print(reply['message']['content'])
    print('\n')

    # Inspect the first running model and report how much of it is in VRAM.
    ps = await client.ps()
    first = ps['models'][0]
    name = first['name']
    size = first['size']
    size_vram = first['size_vram']
    if size == size_vram:
        print(f'{name}: 100% GPU')
    elif not size_vram:
        print(f'{name}: 100% CPU')
    else:
        cpu_percent = round((size - size_vram) / size * 100)
        print(f'{name}: {cpu_percent}% CPU/{100 - cpu_percent}% GPU')


if __name__ == '__main__':
    asyncio.run(main())