Hi, I'm trying to incorporate streamlit_feedback into my chatbot, but it seems like Streamlit just blows right past the widget and never runs the `on_submit` callback… Here's the function in question:
```python
def chatbot_interface(self, index: VectorStoreIndex, saver: Saver) -> bool:
    """Display the chatbot interface and handle user interactions."""
    with st.sidebar:
        st.image("../resources/pic.png", width=200)
        st.title("🍞💬 Chatbot")
        if st.button("Clear Chat History", key="clear_chat", type="primary"):
            st.session_state.messages = [{"role": "assistant", "content": "You have cleared the chat! How dare you...😠"}]
        new_index = self.upload_and_embed_file()
        if new_index is not None:
            print("new index generated!")
            st.success('File(s) have been uploaded!', icon="✅")
            st.snow()  # trying it here instead
            st.session_state.index = new_index

    if "chat_input" not in st.session_state:
        st.session_state["chat_input"] = False
    if "feedback" not in st.session_state:
        st.session_state["feedback"] = False

    for message in st.session_state.messages:
        if message["role"] == "assistant":
            avatar = "../resources/icon.png"
        elif message["role"] == "user":
            avatar = "../resources/human_user_icon.png"
        with st.chat_message(message["role"], avatar=avatar):
            st.markdown(message["content"], unsafe_allow_html=True)
            if "source_files" in message and message["source_files"]:
                st.write("Source Files:")
                for file in message["source_files"]:
                    st.write(f"- {file}")
    if prompt := st.chat_input("Ask a Question"):  #, disabled=not prompt
        st.session_state["chat_input"] = not st.session_state["chat_input"]
        if "rsp_placeholder" in st.session_state:
            st.session_state.rsp_placeholder.empty()  # clears old response, if one exists
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user", avatar="../resources/human_user_icon.png"):
            st.markdown(prompt, unsafe_allow_html=True)
        start_time = time.time()
        if st.session_state.messages[-1]["role"] != "assistant":
            with st.chat_message("assistant", avatar="../resources/icon.png"):
                with st.spinner("Thinking"):
                    response, source_files = self.get_llm_response(prompt)
                    st.session_state.rsp_placeholder = st.empty()  # establish new container
                    full_response = ''
                    for letter in response:
                        full_response += letter
                        st.session_state.rsp_placeholder.markdown(full_response)
                        time.sleep(0.005)
                    st.session_state.rsp_placeholder.markdown(full_response)
                    if source_files:
                        st.write("Source Files:")
                        for file in source_files:
                            st.write(f"- {file}")
            end_time = time.time()
            print(f"Latency: {end_time - start_time} seconds")
            message = {"role": "assistant", "content": full_response, "source_files": list(source_files)}
            st.session_state.messages.append(message)
    if st.session_state["chat_input"]:
        # if st.button("feedback"):
        st.session_state["feedback"] = not st.session_state["feedback"]
        if fb := streamlit_feedback(
            feedback_type="thumbs",
            optional_text_label="[Optional] Enter your feedback here",
            align="flex-start",
            key="feedback_key",
            on_submit=self.handle_feedback,
        ):
            print(fb)
        else:
            print("no fb")
    return True
```
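
For comparison, here's the kind of stripped-down standalone app I'd expect to work — the `handle_feedback` below is just a stand-in callback (not my real one) that logs whatever it receives, and as far as I can tell my call inside `chatbot_interface` matches this pattern:

```python
# Minimal standalone sketch (not my real app); handle_feedback here is only a
# stand-in so I can see whether on_submit ever fires.
import streamlit as st
from streamlit_feedback import streamlit_feedback


def handle_feedback(feedback: dict) -> None:
    # Should run when the user clicks a thumb / submits the optional text.
    print("on_submit fired:", feedback)
    st.session_state["last_feedback"] = feedback


st.title("streamlit_feedback demo")

fb = streamlit_feedback(
    feedback_type="thumbs",
    optional_text_label="[Optional] Enter your feedback here",
    align="flex-start",
    key="feedback_key",  # stable key so the widget keeps its state across reruns
    on_submit=handle_feedback,
)

# My understanding: fb stays None until the rerun that follows a submission,
# and on_submit should fire when the user actually submits feedback.
st.write("returned:", fb)
st.write("stored by callback:", st.session_state.get("last_feedback"))
```

Is there anything obviously different between this and how I'm wiring up `on_submit=self.handle_feedback` inside `chatbot_interface`?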