-
Checked other resources
Commit to Help
Example Code

from langchain_core.messages import SystemMessage
from langgraph.checkpoint import MemorySaver
from langgraph.prebuilt import create_react_agent
from langchain_community.agent_toolkits import SQLDatabaseToolkit
from langchain_openai import AzureChatOpenAI
from langchain_community.utilities.sql_database import SQLDatabase
# Testing locally.
# Fixed: the original had `llm = = AzureChatOpenAI(...)` (stray second '=',
# a SyntaxError) and `from_uri(..)` ('..' is invalid; the placeholder is '...').
llm = AzureChatOpenAI(...)
db = SQLDatabase.from_uri(...)
tools = SQLDatabaseToolkit(db=db, llm=llm).get_tools()
system_message = SystemMessage(content='....')
# NOTE(review): `messages_modifier` is deprecated in recent langgraph releases
# in favour of `state_modifier` / `prompt` — confirm against the installed version.
app = create_react_agent(llm, tools, messages_modifier=system_message, checkpointer=MemorySaver())
def response_generator(input):
    """Send *input* to the agent on thread "1" and return the final reply text."""
    run_config = {"configurable": {"thread_id": "1"}}
    # The checkpointer keys history by thread_id, so repeated calls share context.
    final_state = app.invoke({"messages": [("user", input)]}, run_config)
    return final_state["messages"][-1].content
# Test questions: run locally, the agent manages to read chat history —
# the second call can refer back to the first because both share thread_id "1".
response_generator('what is the capital of france?')
response_generator('what is the first question')
# deploy it with streamlit
import streamlit as st

# Create the per-session message list on the first run of the script.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay the stored conversation so it survives Streamlit's script reruns.
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])
def response_generator(input):
    """Invoke the agent with the user's *input*, record the reply in
    session state, and return the reply text.

    NOTE(review): `app` is built at module import time; Streamlit re-runs the
    whole script on every interaction, which rebuilds the agent (and its
    MemorySaver) and so loses chat history — the accepted fix in this thread
    is to cache the agent with @st.cache_resource.
    """
    config = {"configurable": {"thread_id": "1"}}
    response = app.invoke({'messages': [("user", input)]}, config)['messages'][-1].content
    # Bug fix: the role was misspelled "assisstant", so the replay loop
    # rendered the reply under a non-standard role instead of the assistant one.
    st.session_state.messages.append({"role": "assistant", "content": response})
    return response
# Walrus operator: render the chat box and only fall through when text was submitted.
if prompt:= st.chat_input('How can I help you?'):
# Record the user turn first so the replay loop shows it on the next rerun.
st.session_state.messages.append({"role": "user", "content": prompt})
with st.chat_message("user"):
st.write(prompt)
# The assistant reply is written inside this context on the next source line.
with st.chat_message("assistant"):
st.write(response_generator(prompt))

Description
I am an inexperienced developer and this is my first time raising a question, so sorry if I did anything wrong. I am building an LLM chatbot that can query a database with LangChain and Streamlit. One issue that has bothered me for days is that the chatbot cannot read the chat history, even though the chatbot in my IDE with similar code can read it. I was wondering whether it is an issue with my code or a bug in the new LangGraph agent. I have tried incorporating StreamlitChatMessageHistory into my code as well, but it doesn't work. Any help is much appreciated!

System Info
Latest versions of LangGraph and Streamlit
Beta Was this translation helpful? Give feedback.
Replies: 2 comments 1 reply
-
Hello @tanhl30! I'm here to assist you with any bugs, questions, or contributions. Let's work together to solve the issue you're facing. To resolve the issue where your LLM chatbot, built with LangChain and Streamlit, is unable to access chat history when deployed with Streamlit, you can use the `StreamlitChatMessageHistory` class, which stores messages in Streamlit session state so they persist across reruns. Here is an example of how you can integrate it:

import streamlit as st
from langchain_community.chat_message_histories import StreamlitChatMessageHistory
# Initialize chat history once per session; StreamlitChatMessageHistory itself
# persists its messages under st.session_state["chat_messages"] across reruns.
if 'chat_history' not in st.session_state:
st.session_state.chat_history = StreamlitChatMessageHistory(key="chat_messages")
def add_message_to_history(user_message, bot_response):
    """Append one user/bot exchange to this session's chat history.

    NOTE(review): `StreamlitChatMessageHistory.add_message` officially expects
    a BaseMessage; plain dicts are stored as-is here and read back via
    ['type']/['content'] in the display loop — confirm this duck-typing
    against the installed langchain version.
    """
    history = st.session_state.chat_history
    for role, text in (("user", user_message), ("ai", bot_response)):
        history.add_message({"type": role, "content": text})
def get_chat_history():
    """Return the messages accumulated in this session's chat history."""
    history = st.session_state.chat_history
    return history.messages
# Streamlit app layout
st.title("LLM Chatbot with LangChain and Streamlit")
# keyed input so its value survives reruns under st.session_state["user_input"]
user_input = st.text_input("You: ", key="user_input")
if st.button("Send"):
if user_input:
# Here you would call your LLM to get the bot response
bot_response = "This is a placeholder response from the bot."
# Add both sides of the exchange to chat history
add_message_to_history(user_input, bot_response)
# Display chat history (re-read from session state each rerun)
chat_history = get_chat_history()
for message in chat_history:
st.write(f"{message['type']}: {message['content']}")

This code initializes a `StreamlitChatMessageHistory` keyed into `st.session_state`, so the conversation persists across Streamlit script reruns. By using this approach, your deployed chatbot should be able to read its chat history just like the version running in your IDE. |
Beta Was this translation helpful? Give feedback.
-
Turns out it was streamlit caching function https://docs.streamlit.io/develop/concepts/architecture/caching#stcache_resource |
Beta Was this translation helpful? Give feedback.
Turns out it was streamlit caching function
https://docs.streamlit.io/develop/concepts/architecture/caching#stcache_resource
https://discuss.streamlit.io/t/langchain-memory/38264