Skip to content

Commit 46d8ab8

Browse files
authored
ui.Chat() now correctly handles new ollama.chat() return value introduced in ollama 0.4 (#1787)
1 parent ba97d6d commit 46d8ab8

File tree

6 files changed

+50
-22
lines changed

6 files changed

+50
-22
lines changed

CHANGELOG.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,12 @@ All notable changes to Shiny for Python will be documented in this file.

 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [UNRELEASED]
+
+### Bug fixes
+
+* `ui.Chat()` now correctly handles new `ollama.chat()` return value introduced in `ollama` v0.4. (#1787)
+
 ## [1.2.1] - 2024-11-14

 ### Bug fixes

shiny/templates/chat/hello-providers/ollama/app.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ async def _():
     # Create a response message stream
     # Assumes you've run `ollama run llama3` to start the server
     response = ollama.chat(
-        model="llama3",
+        model="llama3.2",
         messages=messages,
        stream=True,
    )

shiny/ui/_chat.py

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -914,15 +914,6 @@ def _get_token_count(
         if self._tokenizer is None:
             self._tokenizer = get_default_tokenizer()

-        if self._tokenizer is None:
-            raise ValueError(
-                "A tokenizer is required to impose `token_limits` on messages. "
-                "To get a generic default tokenizer, install the `tokenizers` "
-                "package (`pip install tokenizers`). "
-                "To get a more precise token count, provide a specific tokenizer "
-                "to the `Chat` constructor."
-            )
-
         encoded = self._tokenizer.encode(content)
         if isinstance(encoded, TokenizersEncoding):
             return len(encoded.ids)

shiny/ui/_chat_normalize.py

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -231,11 +231,19 @@ def normalize_chunk(self, chunk: "dict[str, Any]") -> ChatMessage:
         return super().normalize_chunk(msg)

     def can_normalize(self, message: Any) -> bool:
-        if not isinstance(message, dict):
-            return False
-        if "message" not in message:
+        try:
+            from ollama import ChatResponse
+
+            # Ollama<0.4 used TypedDict (now it uses pydantic)
+            # https://github.com/ollama/ollama-python/pull/276
+            if isinstance(ChatResponse, dict):
+                return "message" in message and super().can_normalize(
+                    message["message"]
+                )
+            else:
+                return isinstance(message, ChatResponse)
+        except Exception:
             return False
-        return super().can_normalize(message["message"])

     def can_normalize_chunk(self, chunk: Any) -> bool:
         return self.can_normalize(chunk)

shiny/ui/_chat_tokenizer.py

Lines changed: 16 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -45,12 +45,23 @@ def encode(
 TokenEncoding = Union[TiktokenEncoding, TokenizersTokenizer]


-def get_default_tokenizer() -> TokenizersTokenizer | None:
+def get_default_tokenizer() -> TokenizersTokenizer:
     try:
         from tokenizers import Tokenizer

         return Tokenizer.from_pretrained("bert-base-cased")  # type: ignore
-    except Exception:
-        pass
-
-    return None
+    except ImportError:
+        raise ImportError(
+            "Failed to download a default tokenizer. "
+            "A tokenizer is required to impose `token_limits` on `chat.messages()`. "
+            "To get a generic default tokenizer, install the `tokenizers` "
+            "package (`pip install tokenizers`). "
+        )
+    except Exception as e:
+        raise RuntimeError(
+            "Failed to download a default tokenizer. "
+            "A tokenizer is required to impose `token_limits` on `chat.messages()`. "
+            "Try manually downloading a tokenizer using "
+            "`tokenizers.Tokenizer.from_pretrained()` and passing it to `ui.Chat()`."
+            f"Error: {e}"
+        ) from e

tests/pytest/test_chat.py

Lines changed: 15 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -333,6 +333,20 @@ def test_openai_normalization():
     assert msg == {"content": "Hello ", "role": "assistant"}


+def test_ollama_normalization():
+    from ollama import ChatResponse
+    from ollama import Message as OllamaMessage
+
+    # Mock return object from ollama.chat()
+    msg = ChatResponse(
+        message=OllamaMessage(content="Hello world!", role="assistant"),
+    )
+
+    msg_dict = {"content": "Hello world!", "role": "assistant"}
+    assert normalize_message(msg) == msg_dict
+    assert normalize_message_chunk(msg) == msg_dict
+
+
 # ------------------------------------------------------------------------------------
 # Unit tests for as_provider_message()
 #
@@ -462,9 +476,7 @@ def test_as_ollama_message():
     import ollama
     from ollama import Message as OllamaMessage

-    assert "typing.Sequence[ollama._types.Message]" in str(
-        ollama.chat.__annotations__["messages"]
-    )
+    assert "ollama._types.Message" in str(ollama.chat.__annotations__["messages"])

     from shiny.ui._chat_provider_types import as_ollama_message
0 commit comments

Comments (0)