My chatbot app persists the conversation even when the page is reloaded or opened in a new tab.
My app is deployed behind an nginx reverse proxy for internal use. It is a multipage app, and I'm running into the issue on one of the pages, which hosts a simple chatbot. I am using Streamlit version 1.38.0.
When I restart the app, it works as expected for a while: reloading the tab, or opening the URL in another tab or an incognito window, presents a fresh session. However, when I try the app again after some time (usually the next day), it persists conversations: if I send a message and reload the page, the chats are still there. Even if I open the page in an incognito window, the chats are present. I don't understand what's happening, especially given that it works for some time before showing this issue. Since this is a multipage app, I thought the problem might come from a session state key name being shared across pages, so I prepended a prefix to all session state keys on this page, yet I still see the issue. Can someone help me figure out what's going wrong?
None of my other app pages show this kind of behavior (and they are not chatbots).
I looked for similar errors on the forum, and some threads pointed to this happening when st.session_state is set to {}, but I'm not doing that anywhere; the snippet below shows the pattern I mean. I am also not importing anything that writes to the session state.
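For reference, this is the anti-pattern those threads describe, as I understand it: rebinding st.session_state itself to a plain dict, which would leave one shared object on the streamlit module instead of the per-session proxy. My page only ever checks and sets individual keys, as in the guards in the full code below. A minimal sketch of the difference (the literal default here is just a placeholder):

import streamlit as st

# Pattern the forum threads warn about (I am NOT doing this anywhere):
# st.session_state = {}  # rebinds session state itself; my understanding is that it then gets shared across sessions

# Pattern this page actually uses: guard and set individual, prefixed keys.
if 'MAXCHAT_system_prompt' not in st.session_state:
    st.session_state['MAXCHAT_system_prompt'] = "You are an AI assistant."  # placeholder default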
Here’s the entire code -
SYS_PROMPT = """You are an AI assistant.
Your role is to assist users by answering their questions, providing information, and offering guidance on a wide range of topics. Your responses should be clear, accurate, and concise. Prioritize helpfulness, while maintaining a friendly and professional tone.
Follow these guidelines:
- Understand the user’s intent and provide relevant information or solutions.
- Clarify or ask follow-up questions if the user’s request is unclear.
- Stay neutral and avoid opinions or judgments.
- You may ask questions when needed to gather context or offer better assistance."""
import streamlit as st
import requests
from datetime import datetime
import json
import base64
st.set_page_config(
    layout="wide",
    page_title="MaxChat",
    page_icon="💬",
)
if 'MAXCHAT_system_prompt' not in st.session_state:
    st.session_state['MAXCHAT_system_prompt'] = SYS_PROMPT
models_available = ["meta-llama/Llama-3.1-70B-Instruct"]
if 'MAXCHAT_model_chosen' not in st.session_state:
    st.session_state['MAXCHAT_model_chosen'] = models_available[0]
if 'MAXCHAT_data_loaded' not in st.session_state:
    st.session_state['MAXCHAT_data_loaded'] = False
def get_current_datetime():
    # Get the current date and time
    now = datetime.now()
    # Format it as DD MM YY HH MM SS
    formatted_datetime = now.strftime("%d_%m_%y_%H_%M_%S")
    return formatted_datetime
if "MAXCHAT_messages" not in st.session_state:
st.session_state['MAXCHAT_messages'] = []
if "MAXCHAT_disable_sidebar" not in st.session_state:
st.session_state['MAXCHAT_disable_sidebar'] = False
st.html("<h1 style='text-align: center;'>💬 MaxChat</h1>")
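# download_session_state renders a download button that serializes the current
# messages, system prompt, and chosen model to a JSON file with a .maxchat extension.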
def download_session_state():
    session_state = st.session_state
    # st.write(session_state)
    download_json = {
        'messages': session_state['MAXCHAT_messages'],
        'system_prompt': session_state['MAXCHAT_system_prompt'],
        'model_chosen': session_state['MAXCHAT_model_chosen']
    }
    download_json = json.dumps(dict(download_json), indent=4)
    st.download_button(
        label="Download Chat",
        data=download_json,
        file_name=f'chat_{get_current_datetime()}.maxchat',
        mime="application/json",
        use_container_width=True
    )
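# Two columns under the title: "Resume Chat" (upload a .maxchat file) on the left,
# "Download Chat" on the right.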
c1, c2 = st.columns([1, 1])
with c1:
    if 'MAXCHAT_clicked' not in st.session_state:
        st.session_state['MAXCHAT_clicked'] = False

    def set_clicked():
        st.session_state['MAXCHAT_clicked'] = not st.session_state['MAXCHAT_clicked']

    tooltip_message = ""
    if len(st.session_state['MAXCHAT_messages']) > 0:
        tooltip_message = "Resume Chat is unavailable during an active conversation."
    st.button('Resume Chat', on_click=set_clicked, use_container_width=True,
              disabled=len(st.session_state['MAXCHAT_messages']) > 0, help=tooltip_message)
    if st.session_state['MAXCHAT_clicked']:
        uploaded_file = st.file_uploader("Upload your MaxChat file to resume a session", type='maxchat')
        print(uploaded_file)
        if uploaded_file is not None and st.session_state['MAXCHAT_data_loaded'] == False:
            data = json.loads(uploaded_file.getvalue())
            st.session_state['MAXCHAT_messages'] = data['messages'].copy()
            st.session_state['MAXCHAT_system_prompt'] = data['system_prompt']
            st.session_state['MAXCHAT_disable_sidebar'] = True
            st.session_state['MAXCHAT_model_chosen'] = data['model_chosen']
            st.session_state['MAXCHAT_data_loaded'] = True
with c2:
    download_session_state()
# Merge consecutive messages from the same role (e.g. if the user sends
# several messages before the model replies).
def merge_consecutive_messages(messages):
    merged_messages = []
    # Initialize with the first message
    current_message = messages[0]
    for msg in messages[1:]:
        # If the role matches, concatenate the content
        if msg['role'] == current_message['role']:
            current_message['content'] += ' ' + msg['content']
        else:
            # If the role doesn't match, append the current message to the result list
            merged_messages.append(current_message)
            current_message = msg
    # Append the last message
    merged_messages.append(current_message)
    return merged_messages
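# create_completion packs the prior turns into (user_prompt, assistant_response)
# pairs, sends them plus the newest user message and the system prompt to the
# internal endpoint, and returns the model's reply.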
def create_completion():
    # Merging if ever the user sends multiple messages before model replies.
    current_messages = merge_consecutive_messages(st.session_state['MAXCHAT_messages'].copy())
    old_messages = []
    idx = 0
    curr_m = {}
    for elem in current_messages[:-1]:
        if idx % 2 == 0:
            curr_m['user_prompt'] = elem['content']
        else:
            curr_m['assistant_response'] = elem['content']
            old_messages.append(curr_m)
            curr_m = {}
        idx += 1
    new_message = current_messages[-1]['content']
    request_data = {
        'type': 'testing',
        'max_tokens': 20000,
        'system_prompt': st.session_state['MAXCHAT_system_prompt'],
        "old_messages": old_messages,
        "new_message": new_message,
        'temperature': 0.75,
        'top_p': 0.5,
    }
    url = 'ENDPOINT_URL'
    response = requests.post(url, json=request_data)
    response.raise_for_status()  # Raises an HTTPError for bad responses
    response_json = response.json()
    return response_json["response"]
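# download_chat is an unused placeholder; downloading is handled by download_session_state above.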
def download_chat():
    pass

def toggle_system_prompt_callback():
    if 'MAXCHAT_toggle_system_prompt' not in st.session_state:
        st.session_state['MAXCHAT_toggle_system_prompt'] = False
    else:
        st.session_state['MAXCHAT_toggle_system_prompt'] = not st.session_state['MAXCHAT_toggle_system_prompt']
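# Sidebar: model picker plus a system prompt editor; the editor is locked once a
# conversation has started or a chat has been resumed from a file.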
with st.sidebar:
    index_of_model = models_available.index(st.session_state['MAXCHAT_model_chosen'])
    model_chosen_by_user = st.selectbox(
        "Choose Model",
        models_available,
        index=index_of_model
    )
    st.session_state['MAXCHAT_model_chosen'] = model_chosen_by_user
    if st.session_state['MAXCHAT_disable_sidebar']:
        # toggle_system_prompt = st.toggle("Edit System Prompt", False, on_change=toggle_system_prompt_callback, disabled=True)
        st.session_state['MAXCHAT_system_prompt'] = st.text_area("System Prompt:", st.session_state['MAXCHAT_system_prompt'], height=300, disabled=True)
    else:
        toggle_system_prompt = st.toggle("Edit System Prompt", False, on_change=toggle_system_prompt_callback)
        st.session_state['MAXCHAT_system_prompt'] = st.text_area("System Prompt:", st.session_state['MAXCHAT_system_prompt'], disabled=not toggle_system_prompt, height=300)
    # st.session_state['MAXCHAT_system_prompt'] = SYS_PROMPT
    st.write("Important Reminder: Once a conversation has started, the system prompt cannot be changed. If you'd like to switch to a different prompt, please refresh the page to begin a new conversation.")
# Display chat messages from history on app rerun
for message in st.session_state['MAXCHAT_messages']:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
# React to user input
if prompt := st.chat_input("Ask MaxChat"):
    st.session_state['MAXCHAT_disable_sidebar'] = True
    # Display user message in chat message container
    with st.chat_message("user"):
        st.markdown(prompt)
    # Add user message to chat history
    st.session_state['MAXCHAT_messages'].append({"role": "user", "content": prompt})
    reply = create_completion()
    response = reply
    # Display assistant response in chat message container
    with st.chat_message("assistant"):
        st.markdown(response)
    # Add assistant response to chat history
    st.session_state['MAXCHAT_messages'].append({"role": "assistant", "content": response})
    st.rerun()