
Commit 9ff95ef

chore: Rename variables chat_model to chat_client (#1927)
1 parent 22745ae commit 9ff95ef

8 files changed: +18 additions, -18 deletions
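Every hunk below makes the same two changes per template: the chatlas chat object is renamed from chat_model to chat_client where it is constructed, and the matching call site in the @chat.on_user_submit handler is updated. A minimal sketch of the renamed pattern follows; the imports, provider choice, and the ui.Chat(...) / chat.ui() lines are assumed surrounding context for illustration and do not appear in this diff.

# Minimal sketch of the renamed pattern; not a verbatim template file.
# The imports, ui.Chat(id="chat"), and chat.ui() lines are assumed context
# that does not appear in this diff.
from chatlas import ChatOpenAI
from shiny.express import ui

chat_client = ChatOpenAI(model="gpt-4o")  # previously named chat_model

chat = ui.Chat(id="chat")
chat.ui()


@chat.on_user_submit
async def handle_user_input(user_input: str):
    # previously: chat_model.stream_async(user_input)
    response = await chat_client.stream_async(user_input)
    await chat.append_message_stream(response)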

shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py

Lines changed: 2 additions & 2 deletions

@@ -13,7 +13,7 @@
 # them in a file named `.env`. The `python-dotenv` package will load `.env` as
 # environment variables which can be read by `os.getenv()`.
 load_dotenv()
-chat_model = ChatBedrockAnthropic(
+chat_client = ChatBedrockAnthropic(
     model="anthropic.claude-3-sonnet-20240229-v1:0",
 )

@@ -32,5 +32,5 @@
 # Define a callback to run when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = await chat_model.stream_async(user_input)
+    response = await chat_client.stream_async(user_input)
     await chat.append_message_stream(response)
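The comments in this hunk describe the python-dotenv workflow the templates rely on; a minimal, self-contained illustration of that pattern (the variable name MY_API_KEY is purely hypothetical):

# Minimal illustration of the python-dotenv pattern described in the comments above.
# MY_API_KEY is a hypothetical variable name used only for this example.
import os
from dotenv import load_dotenv

load_dotenv()                      # reads key=value pairs from a local .env file into the environment
api_key = os.getenv("MY_API_KEY")  # returns None if the variable is not set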

shiny/templates/chat/llm-enterprise/azure-openai/app.py

Lines changed: 2 additions & 2 deletions

@@ -12,7 +12,7 @@
 # See the docs for more information on how to obtain one.
 # https://posit-dev.github.io/chatlas/reference/ChatAzureOpenAI.html
 load_dotenv()
-chat_model = ChatAzureOpenAI(
+chat_client = ChatAzureOpenAI(
     api_key=os.getenv("AZURE_OPENAI_API_KEY"),
     endpoint="https://my-endpoint.openai.azure.com",
     deployment_id="gpt-4o-mini",

@@ -37,5 +37,5 @@
 # Define a callback to run when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = await chat_model.stream_async(user_input)
+    response = await chat_client.stream_async(user_input)
     await chat.append_message_stream(response)

shiny/templates/chat/llms/anthropic/app.py

Lines changed: 2 additions & 2 deletions

@@ -12,7 +12,7 @@
 # See the docs for more information on how to obtain one.
 # https://posit-dev.github.io/chatlas/reference/ChatAnthropic.html
 load_dotenv()
-chat_model = ChatAnthropic(
+chat_client = ChatAnthropic(
     api_key=os.environ.get("ANTHROPIC_API_KEY"),
     model="claude-3-7-sonnet-latest",
     system_prompt="You are a helpful assistant.",

@@ -37,5 +37,5 @@
 # Generate a response when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = await chat_model.stream_async(user_input)
+    response = await chat_client.stream_async(user_input)
     await chat.append_message_stream(response)

shiny/templates/chat/llms/google/app.py

Lines changed: 2 additions & 2 deletions

@@ -12,7 +12,7 @@
 # See the docs for more information on how to obtain one.
 # https://posit-dev.github.io/chatlas/reference/ChatGoogle.html
 load_dotenv()
-chat_model = ChatGoogle(
+chat_client = ChatGoogle(
     api_key=os.environ.get("GOOGLE_API_KEY"),
     system_prompt="You are a helpful assistant.",
     model="gemini-2.0-flash",

@@ -33,5 +33,5 @@
 # Generate a response when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = await chat_model.stream_async(user_input)
+    response = await chat_client.stream_async(user_input)
     await chat.append_message_stream(response)

shiny/templates/chat/llms/langchain/app.py

Lines changed: 2 additions & 2 deletions

@@ -15,7 +15,7 @@
 # app, or set them in a file named `.env`. The `python-dotenv` package will load `.env`
 # as environment variables which can later be read by `os.getenv()`.
 load_dotenv()
-chat_model = ChatOpenAI(
+chat_client = ChatOpenAI(
     api_key=os.environ.get("OPENAI_API_KEY"),
     model="gpt-4o",
 )

@@ -38,5 +38,5 @@
 # Define a callback to run when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = await chat_model.stream_async(user_input)
+    response = await chat_client.stream_async(user_input)
     await chat.append_message_stream(response)

shiny/templates/chat/llms/ollama/app.py

Lines changed: 2 additions & 2 deletions

@@ -9,7 +9,7 @@
 # ChatOllama() requires an Ollama model server to be running locally.
 # See the docs for more information on how to set up a local Ollama server.
 # https://posit-dev.github.io/chatlas/reference/ChatOllama.html
-chat_model = ChatOllama(model="llama3.2")
+chat_client = ChatOllama(model="llama3.2")

 # Set some Shiny page options
 ui.page_opts(

@@ -29,5 +29,5 @@
 # Generate a response when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = await chat_model.stream_async(user_input)
+    response = await chat_client.stream_async(user_input)
     await chat.append_message_stream(response)
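The comment in this hunk notes that ChatOllama() needs a local Ollama server. Below is a hedged, standard-library-only sketch for checking that such a server is reachable before constructing the client; the default address http://localhost:11434 is an assumption about a typical Ollama install and is not stated in this diff.

# Hedged helper: check whether a local Ollama server answers before building ChatOllama.
# http://localhost:11434 is the conventional default address; treat it as an assumption.
import urllib.error
import urllib.request


def ollama_is_up(url: str = "http://localhost:11434", timeout: float = 2.0) -> bool:
    try:
        with urllib.request.urlopen(url, timeout=timeout):
            return True
    except (urllib.error.URLError, OSError):
        return False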

shiny/templates/chat/llms/openai/app.py

Lines changed: 2 additions & 2 deletions

@@ -12,7 +12,7 @@
 # See the docs for more information on how to obtain one.
 # https://posit-dev.github.io/chatlas/reference/ChatOpenAI.html
 load_dotenv()
-chat_model = ChatOpenAI(
+chat_client = ChatOpenAI(
     api_key=os.environ.get("OPENAI_API_KEY"),
     model="gpt-4o",
     system_prompt="You are a helpful assistant.",

@@ -37,5 +37,5 @@
 # Generate a response when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = await chat_model.stream_async(user_input)
+    response = await chat_client.stream_async(user_input)
     await chat.append_message_stream(response)

shiny/templates/chat/llms/playground/app.py

Lines changed: 4 additions & 4 deletions

@@ -63,15 +63,15 @@ def get_model():
     }

     if input.model() in models["openai"]:
-        chat_model = ctl.ChatOpenAI(**model_params)
+        chat_client = ctl.ChatOpenAI(**model_params)
     elif input.model() in models["claude"]:
-        chat_model = ctl.ChatAnthropic(**model_params)
+        chat_client = ctl.ChatAnthropic(**model_params)
     elif input.model() in models["google"]:
-        chat_model = ctl.ChatGoogle(**model_params)
+        chat_client = ctl.ChatGoogle(**model_params)
     else:
         raise ValueError(f"Invalid model: {input.model()}")

-    return chat_model
+    return chat_client


 @reactive.calc
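Only get_model() appears in this hunk; the rest of the playground app is not shown. A hedged sketch of how a reactive constructor like this is typically consumed downstream (everything except the get_model and chat_client names is an assumption for illustration, not the playground's actual code):

# Hedged sketch of downstream use; not the playground's actual code beyond this diff.
@chat.on_user_submit
async def handle_user_input(user_input: str):
    chat_client = get_model()  # reactive.calc: rebuilt when input.model() (or related inputs) change
    response = await chat_client.stream_async(user_input)
    await chat.append_message_stream(response)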
