Old response still displaying (faded)

If you’re creating a debugging post, please include the following info:

  1. running Locally
    Hi, I am having a problem where it will display the response again when another prompt is entered. This is for a RAG chatbot.

    Any advice? heres the main function of my code:
def chatbot_interface(self, index: VectorStoreIndex, saver: Saver) -> bool:
        """Display the chatbot interface and handle user interactions."""
        with st.sidebar:
            st.title("πŸžπŸ’¬ Chatbot")
            if st.button("Clear Chat History", key="clear_chat", type="primary"):
                st.session_state.messages = [{"role": "assistant", "content": "You have cleared the chat! How dare you...😠"}]
            new_index = self.upload_and_embed_file()
            if new_index is not None:
                print("new index generated!")
                st.success('File(s) have been uploaded!', icon="βœ…")
                st.snow() # trying it here instead
                st.session_state.index = new_index
        for message in st.session_state.messages:
            with st.chat_message(message["role"]):
                st.markdown(message["content"], unsafe_allow_html=True)
                if "source_files" in message and message["source_files"]:
                    st.write("Source Files:")
                    for file in message["source_files"]:
                        st.write(f"- {file}")
        if prompt := st.chat_input("Ask a Question"):
            st.session_state.messages.append({"role": "user", "content": prompt})
            with st.chat_message("user"):
                st.markdown(prompt, unsafe_allow_html=True)
            start_time = time.time()
        if st.session_state.messages[-1]["role"] != "assistant":
            with st.chat_message("assistant"):
                my_spinner= st.spinner("Thinking")
                with my_spinner:
                    # it is printing response again after a new prompt when using spinner()
                    response, source_files = self.get_llm_response(prompt)
                if source_files:
                    st.write("Source Files:")
                    for file in source_files:
                        st.write(f"- {file}")

                end_time = time.time()
                print(f"Latency: {end_time - start_time} seconds")
                message = {"role": "assistant", "content": response, "source_files": list(source_files)}
                # so this shows and user can enter, it just returns None... :(
                feedback_value = streamlit_feedback(feedback_type="thumbs", # can be "thumbs" or "faces"
                                    optional_text_label="[Optional] Enter your feedback here", 
                # then we can send the feedback into like evaluator maybe...
        return True