Skip to content

Commit 5454eb0

Browse files
authored
Fix(Chat): Always ensure Anthropic user message comes first (not just when token_limits are provided) (#1685)
1 parent 5419472 commit 5454eb0

File tree

3 files changed

+34
-17
lines changed

3 files changed

+34
-17
lines changed

CHANGELOG.md

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
99

1010
### Bug fixes
1111

12-
* Fixed a bug with `Chat()` sometimes silently dropping errors. (#1672)
13-
14-
* Fixed a bug with `Chat()` sometimes not removing its loading icon (on error or a `None` transform). (#1679)
12+
* A few fixes for `ui.Chat()`, including:
13+
* Fixed a bug with `Chat()` sometimes silently dropping errors. (#1672)
14+
* Fixed a bug with `Chat()` sometimes not removing its loading icon (on error or a `None` transform). (#1679)
15+
* `.messages(format="anthropic")` correctly removes non-user starting messages (once again). (#1685)
1516

1617
* `shiny create` now uses the template `id` rather than the directory name as the default directory. (#1666)
1718

shiny/templates/chat/production/anthropic/app.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -52,8 +52,14 @@
5252

5353
@chat.on_user_submit
5454
async def _():
55-
messages = chat.messages(format="openai", token_limits=MODEL_INFO["token_limits"])
56-
response = await llm.chat.completions.create(
57-
model=MODEL_INFO["name"], messages=messages, stream=True
55+
messages = chat.messages(
56+
format="anthropic",
57+
token_limits=MODEL_INFO["token_limits"],
58+
)
59+
response = await llm.messages.create(
60+
model=MODEL_INFO["name"],
61+
messages=messages,
62+
stream=True,
63+
max_tokens=MODEL_INFO["token_limits"][1],
5864
)
5965
await chat.append_message_stream(response)

shiny/ui/_chat.py

Lines changed: 21 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -471,6 +471,11 @@ def messages(
471471
"""
472472

473473
messages = self._messages()
474+
475+
# Anthropic requires a user message first and no system messages
476+
if format == "anthropic":
477+
messages = self._trim_anthropic_messages(messages)
478+
474479
if token_limits is not None:
475480
messages = self._trim_messages(messages, token_limits, format)
476481

@@ -868,17 +873,6 @@ def _trim_messages(
868873
messages2.append(m)
869874
n_other_messages2 += 1
870875

871-
# Anthropic doesn't support `role: system` and requires a user message to come 1st
872-
if format == "anthropic":
873-
if n_system_messages > 0:
874-
raise ValueError(
875-
"Anthropic requires a system prompt to be specified in it's `.create()` method "
876-
"(not in the chat messages with `role: system`)."
877-
)
878-
while n_other_messages2 > 0 and messages2[-1]["role"] != "user":
879-
messages2.pop()
880-
n_other_messages2 -= 1
881-
882876
messages2.reverse()
883877

884878
if len(messages2) == n_system_messages and n_other_messages2 > 0:
@@ -890,6 +884,22 @@ def _trim_messages(
890884

891885
return tuple(messages2)
892886

887+
def _trim_anthropic_messages(
888+
self,
889+
messages: tuple[TransformedMessage, ...],
890+
) -> tuple[TransformedMessage, ...]:
891+
892+
if any(m["role"] == "system" for m in messages):
893+
raise ValueError(
894+
"Anthropic requires a system prompt to be specified in it's `.create()` method "
895+
"(not in the chat messages with `role: system`)."
896+
)
897+
for i, m in enumerate(messages):
898+
if m["role"] == "user":
899+
return messages[i:]
900+
901+
return ()
902+
893903
def _get_token_count(
894904
self,
895905
content: str,

0 commit comments

Comments
 (0)