I am running this Streamlit app locally on my machine. Here is the whole project, which consists of 4 files:
app.py
import streamlit as st
from chat_history import load_chat_history, save_chat_history
from response_handeler import generate_response
from ui import display_chat, get_user_input, add_message_to_chat
from datetime import datetime

st.title("A.U.R.A.🫡")

if "messages" not in st.session_state:
    st.session_state.messages = load_chat_history()
if "interactions" not in st.session_state:
    st.session_state.interactions = []

display_chat(st.session_state.messages)

with st.sidebar:
    temperature = st.slider("Set the temperature", min_value=0.0, max_value=1.0, value=0.5, step=0.1)
    if st.button("Clear Chat"):
        st.session_state.messages = []
        st.session_state.interactions = []
        save_chat_history([])
    st.write(f"Current Temperature: ", temperature)
    st.write(st.session_state)

if user_input := get_user_input():
    now = datetime.now()
    formatted_date_time = now.strftime("%Y-%m-%d %H:%M:%S")
    st.session_state.interactions.append({"role": "user", "content": user_input})
    add_message_to_chat("user", user_input)
    response_placeholder = add_message_to_chat("ai", "")
    response = generate_response(user_input, response_placeholder, temperature)
    st.session_state.interactions.append({"role": "ai", "content": response})
    print(f"Current Interactions: {st.session_state.interactions}")
    st.session_state.messages.append({"user": user_input, "ai": response, "date-time": formatted_date_time})
    save_chat_history(st.session_state.messages)
chat_history.py
import shelve

def load_chat_history():
    with shelve.open("chat_history.db") as db:
        messages = db.get("messages", [])
        return messages

def save_chat_history(messages):
    with shelve.open("chat_history.db") as db:
        db["messages"] = messages
response_handeler.py
import streamlit as st
from langchain_ollama import OllamaLLM
from langchain_core.messages import HumanMessage, AIMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.callbacks import StreamlitCallbackHandler
import datetime

llm = OllamaLLM(model="gemma2:2b", callbacks=[StreamlitCallbackHandler(parent_container=st)])

prompt_template = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            "You are AURA (Advanced User-centric Responsive Assistant), "
            "a friendly assistant. The user's name is 'Aditya Pratap Singh'. "
            "Provide concise, friendly, and helpful answers."
            "date-time of the interaction is {date-time}"
        ),
        MessagesPlaceholder(variable_name="chat_history"),
        ("human", "{input}"),
    ]
)

chain = prompt_template | llm

def generate_response(prompt, response_placeholder, new_temperature):
    llm.temperature = new_temperature / 100
    st.write(f"Temperature: {new_temperature}%")
    chat_history = st.session_state.interactions
    if not all(isinstance(message, dict) and "role" in message and "content" in message for message in chat_history):
        chat_history = []  # Reset chat_history if invalid format
    # Prepare input for the chain
    input_data = {"input": prompt, "chat_history": st.session_state.interactions, "date-time": datetime.datetime.now()}
    # Clear the placeholder to avoid previous responses
    response_placeholder.empty()
    # Generate response with streaming
    response = chain.invoke(input_data)
    # Display the full response in Streamlit
    response_placeholder.markdown(response)
    return response
ui.py
import streamlit as st

User_icon = "👤"
Gemma_icon = "🤖"

def display_chat(messages):
    for message in messages:
        with st.chat_message("user", avatar=User_icon):
            st.markdown(message["user"])
        with st.chat_message("ai", avatar=Gemma_icon):
            st.markdown(message["ai"])

def get_user_input():
    return st.chat_input("How can I help you?")

def add_message_to_chat(role, content):
    if role == "user":
        with st.chat_message("user", avatar=User_icon):
            st.markdown(content)
    elif role == "ai":
        with st.chat_message("ai", avatar=Gemma_icon):
            placeholder = st.empty()
            return placeholder
When I send a message, “Bad message format Bad ‘setIn’ index 9 (should be between [0, 1])” always pops up, although both the user message and the AI message appear. Can you please help me?
My Python version: Python 3.13.1
My Streamlit version: Streamlit, version 1.41.1