mirror of
https://github.com/ollama/ollama-python.git
synced 2026-01-13 21:57:16 +08:00
add support for 'high'/'medium'/'low' think values
currently only supported on gpt-oss, but as more models come out with support like this we'll likely relax the particular values that can be provided
This commit is contained in:
parent
34e98bd237
commit
aa4b476f26
@ -73,3 +73,6 @@ Requirement: `pip install tqdm`
|
||||
|
||||
### Thinking (generate) - Enable thinking mode for a model
|
||||
- [thinking-generate.py](thinking-generate.py)
|
||||
|
||||
### Thinking (levels) - Choose the thinking level
|
||||
- [thinking-levels.py](thinking-levels.py)
|
||||
|
||||
26
examples/thinking-levels.py
Normal file
26
examples/thinking-levels.py
Normal file
@ -0,0 +1,26 @@
|
||||
from ollama import chat
|
||||
|
||||
|
||||
def heading(text):
    """Print *text* followed by a matching-width '=' underline."""
    underline = '=' * len(text)
    print(text)
    print(underline)
|
||||
|
||||
|
||||
# One-shot arithmetic prompt reused for every thinking level.
messages = [
    {'role': 'user', 'content': 'What is 10 + 23?'},
]

# gpt-oss supports 'low', 'medium', 'high'
levels = ['low', 'medium', 'high']
total = len(levels)
for position, think_level in enumerate(levels, start=1):
    response = chat('gpt-oss:20b', messages=messages, think=think_level)

    heading(f'Thinking ({think_level})')
    print(response.message.thinking)
    print('\n')
    heading('Response')
    print(response.message.content)
    print('\n')
    # Separate consecutive levels with a rule; nothing after the last one.
    if position < total:
        print('-' * 20)
        print('\n')
|
||||
@ -274,7 +274,7 @@ class Client(BaseClient):
|
||||
*,
|
||||
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
|
||||
stream: Literal[False] = False,
|
||||
think: Optional[bool] = None,
|
||||
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
|
||||
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
|
||||
options: Optional[Union[Mapping[str, Any], Options]] = None,
|
||||
keep_alive: Optional[Union[float, str]] = None,
|
||||
@ -288,7 +288,7 @@ class Client(BaseClient):
|
||||
*,
|
||||
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
|
||||
stream: Literal[True] = True,
|
||||
think: Optional[bool] = None,
|
||||
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
|
||||
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
|
||||
options: Optional[Union[Mapping[str, Any], Options]] = None,
|
||||
keep_alive: Optional[Union[float, str]] = None,
|
||||
@ -301,7 +301,7 @@ class Client(BaseClient):
|
||||
*,
|
||||
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
|
||||
stream: bool = False,
|
||||
think: Optional[bool] = None,
|
||||
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
|
||||
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
|
||||
options: Optional[Union[Mapping[str, Any], Options]] = None,
|
||||
keep_alive: Optional[Union[float, str]] = None,
|
||||
@ -702,7 +702,7 @@ class AsyncClient(BaseClient):
|
||||
template: str = '',
|
||||
context: Optional[Sequence[int]] = None,
|
||||
stream: Literal[False] = False,
|
||||
think: Optional[bool] = None,
|
||||
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
|
||||
raw: bool = False,
|
||||
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
|
||||
images: Optional[Sequence[Union[str, bytes, Image]]] = None,
|
||||
@ -721,7 +721,7 @@ class AsyncClient(BaseClient):
|
||||
template: str = '',
|
||||
context: Optional[Sequence[int]] = None,
|
||||
stream: Literal[True] = True,
|
||||
think: Optional[bool] = None,
|
||||
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
|
||||
raw: bool = False,
|
||||
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
|
||||
images: Optional[Sequence[Union[str, bytes, Image]]] = None,
|
||||
@ -739,7 +739,7 @@ class AsyncClient(BaseClient):
|
||||
template: Optional[str] = None,
|
||||
context: Optional[Sequence[int]] = None,
|
||||
stream: bool = False,
|
||||
think: Optional[bool] = None,
|
||||
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
|
||||
raw: Optional[bool] = None,
|
||||
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
|
||||
images: Optional[Sequence[Union[str, bytes, Image]]] = None,
|
||||
@ -785,7 +785,7 @@ class AsyncClient(BaseClient):
|
||||
*,
|
||||
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
|
||||
stream: Literal[False] = False,
|
||||
think: Optional[bool] = None,
|
||||
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
|
||||
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
|
||||
options: Optional[Union[Mapping[str, Any], Options]] = None,
|
||||
keep_alive: Optional[Union[float, str]] = None,
|
||||
@ -799,7 +799,7 @@ class AsyncClient(BaseClient):
|
||||
*,
|
||||
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
|
||||
stream: Literal[True] = True,
|
||||
think: Optional[bool] = None,
|
||||
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
|
||||
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
|
||||
options: Optional[Union[Mapping[str, Any], Options]] = None,
|
||||
keep_alive: Optional[Union[float, str]] = None,
|
||||
@ -812,7 +812,7 @@ class AsyncClient(BaseClient):
|
||||
*,
|
||||
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
|
||||
stream: bool = False,
|
||||
think: Optional[bool] = None,
|
||||
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
|
||||
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
|
||||
options: Optional[Union[Mapping[str, Any], Options]] = None,
|
||||
keep_alive: Optional[Union[float, str]] = None,
|
||||
|
||||
@ -207,7 +207,7 @@ class GenerateRequest(BaseGenerateRequest):
|
||||
images: Optional[Sequence[Image]] = None
|
||||
'Image data for multimodal models.'
|
||||
|
||||
think: Optional[bool] = None
|
||||
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None
|
||||
'Enable thinking mode (for thinking models).'
|
||||
|
||||
|
||||
@ -357,7 +357,7 @@ class ChatRequest(BaseGenerateRequest):
|
||||
tools: Optional[Sequence[Tool]] = None
|
||||
'Tools to use for the chat.'
|
||||
|
||||
think: Optional[bool] = None
|
||||
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None
|
||||
'Enable thinking mode (for thinking models).'
|
||||
|
||||
|
||||
|
||||
Loading…
Reference in New Issue
Block a user