From aa4b476f26a1147ea1597faa9d8583c0216cb8a7 Mon Sep 17 00:00:00 2001 From: Devon Rifkin Date: Thu, 7 Aug 2025 14:39:36 -0700 Subject: [PATCH] add support for 'high'/'medium'/'low' think values currently only supported on gpt-oss, but as more models come out with support like this we'll likely relax the particular values that can be provided --- examples/README.md | 3 +++ examples/thinking-levels.py | 26 ++++++++++++++++++++++++++ ollama/_client.py | 18 +++++++++--------- ollama/_types.py | 4 ++-- 4 files changed, 40 insertions(+), 11 deletions(-) create mode 100644 examples/thinking-levels.py diff --git a/examples/README.md b/examples/README.md index ff6bdb8..0b88da1 100644 --- a/examples/README.md +++ b/examples/README.md @@ -73,3 +73,6 @@ Requirement: `pip install tqdm` ### Thinking (generate) - Enable thinking mode for a model - [thinking-generate.py](thinking-generate.py) + +### Thinking (levels) - Choose the thinking level +- [thinking-levels.py](thinking-levels.py) diff --git a/examples/thinking-levels.py b/examples/thinking-levels.py new file mode 100644 index 0000000..8fd581c --- /dev/null +++ b/examples/thinking-levels.py @@ -0,0 +1,26 @@ +from ollama import chat + + +def heading(text): + print(text) + print('=' * len(text)) + + +messages = [ + {'role': 'user', 'content': 'What is 10 + 23?'}, +] + +# gpt-oss supports 'low', 'medium', 'high' +levels = ['low', 'medium', 'high'] +for i, level in enumerate(levels): + response = chat('gpt-oss:20b', messages=messages, think=level) + + heading(f'Thinking ({level})') + print(response.message.thinking) + print('\n') + heading('Response') + print(response.message.content) + print('\n') + if i < len(levels) - 1: + print('-' * 20) + print('\n') diff --git a/ollama/_client.py b/ollama/_client.py index 4555a93..0a85a74 100644 --- a/ollama/_client.py +++ b/ollama/_client.py @@ -274,7 +274,7 @@ class Client(BaseClient): *, tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, stream: 
Literal[False] = False, - think: Optional[bool] = None, + think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, @@ -288,7 +288,7 @@ class Client(BaseClient): *, tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, stream: Literal[True] = True, - think: Optional[bool] = None, + think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, @@ -301,7 +301,7 @@ class Client(BaseClient): *, tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, stream: bool = False, - think: Optional[bool] = None, + think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, @@ -702,7 +702,7 @@ class AsyncClient(BaseClient): template: str = '', context: Optional[Sequence[int]] = None, stream: Literal[False] = False, - think: Optional[bool] = None, + think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None, raw: bool = False, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, images: Optional[Sequence[Union[str, bytes, Image]]] = None, @@ -721,7 +721,7 @@ class AsyncClient(BaseClient): template: str = '', context: Optional[Sequence[int]] = None, stream: Literal[True] = True, - think: Optional[bool] = None, + think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None, raw: bool = False, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, images: Optional[Sequence[Union[str, bytes, Image]]] = None, @@ -739,7 +739,7 @@ class 
AsyncClient(BaseClient): template: Optional[str] = None, context: Optional[Sequence[int]] = None, stream: bool = False, - think: Optional[bool] = None, + think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None, raw: Optional[bool] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, images: Optional[Sequence[Union[str, bytes, Image]]] = None, @@ -785,7 +785,7 @@ class AsyncClient(BaseClient): *, tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, stream: Literal[False] = False, - think: Optional[bool] = None, + think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, @@ -799,7 +799,7 @@ class AsyncClient(BaseClient): *, tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, stream: Literal[True] = True, - think: Optional[bool] = None, + think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, @@ -812,7 +812,7 @@ class AsyncClient(BaseClient): *, tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, stream: bool = False, - think: Optional[bool] = None, + think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, options: Optional[Union[Mapping[str, Any], Options]] = None, keep_alive: Optional[Union[float, str]] = None, diff --git a/ollama/_types.py b/ollama/_types.py index db928e5..0482287 100644 --- a/ollama/_types.py +++ b/ollama/_types.py @@ -207,7 +207,7 @@ class GenerateRequest(BaseGenerateRequest): images: Optional[Sequence[Image]] = None 'Image data for multimodal models.' 
- think: Optional[bool] = None + think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None 'Enable thinking mode (for thinking models).' @@ -357,7 +357,7 @@ class ChatRequest(BaseGenerateRequest): tools: Optional[Sequence[Tool]] = None 'Tools to use for the chat.' - think: Optional[bool] = None + think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None 'Enable thinking mode (for thinking models).'