from typing import Iterable

import ollama


def print_logprobs(logprobs: Iterable[dict], label: str) -> None:
  """Print each token's log probability and any alternative candidates."""
  print(f'\n{label}:')
  for entry in logprobs:
    token = entry.get('token', '')
    logprob = entry.get('logprob')
    print(f'  token={token!r:<12} logprob={logprob:.3f}')
    # Show the alternative candidates returned via top_logprobs,
    # skipping the token that was actually chosen.
    for alt in entry.get('top_logprobs', []):
      if alt['token'] != token:
        print(f'    alt -> {alt["token"]!r:<12} ({alt["logprob"]:.3f})')


# Request per-token log probabilities, plus the top 3 candidate tokens
# considered at each position.
response = ollama.generate(
  model='gemma3',
  prompt='hi! be concise.',
  logprobs=True,
  top_logprobs=3,
)
print('Generate response:', response['response'])
print_logprobs(response.get('logprobs', []), 'generate logprobs')
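
# A hedged sketch of the same pattern against the chat endpoint. It assumes
# chat() in this client accepts the same logprobs/top_logprobs options as
# generate() and returns 'logprobs' at the top level of the response; adjust
# if your client version exposes them differently.
chat_response = ollama.chat(
  model='gemma3',
  messages=[{'role': 'user', 'content': 'hi! be concise.'}],
  logprobs=True,
  top_logprobs=3,
)
print('Chat response:', chat_response['message']['content'])
print_logprobs(chat_response.get('logprobs', []), 'chat logprobs')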