Display bug only on second question

Hey guys i am almost finished building my app but I have one more bug.
I display sources with mention() and whenever i ask the second question the sources from the first question are displayed in the question field of the second question until the answer finished generating.

It looks like this:

I get the sources from a socket server in the right format. After this bug appears on the second question everything works perfectly fine.Can someone help me fix this small issue?
Here is my code:

import streamlit as st
import json
from streamlit_feedback import streamlit_feedback
import uuid
from langfuse import Langfuse
from streamlit_extras.mention import mention
from llm_api_server.client import llm_api_reqeust
from utils import *

class StreamlitApp:
    def __init__(self, langfuse) -> None:
        self.langfuse = langfuse

        st.set_page_config(
            page_title="SICBERT | Siemens Chatbot",
            page_icon="images/siemens_logo.png",
            layout="centered",
        )

        st.title("SICBERT Siemens-Chatbot")

        self.version = st.selectbox(
            label="Select your system version",
            options=["1", "2", "3"],
            index=None,
            placeholder="Select your system version",
            key="version",
            label_visibility="collapsed"
        )

        if 'question_state' not in st.session_state:
            st.session_state.question_state = False

        if 'fbk' not in st.session_state:
            st.session_state.fbk = str(uuid.uuid4())

        if "chat_history" not in st.session_state:
            st.session_state.chat_history = []

    def display_chat_history(self):
        for entry in st.session_state.chat_history:
            with st.chat_message("human"):
                st.write(entry["question"])
            with st.chat_message("ai", avatar='images/huber.png'):
                st.write(''.join(entry["answer"]))
                for source in entry.get("sources", []):
                    mention(label=source['title'].replace("_", " "), url=source['source'])

    def display_answer(self, question):
        if question is None:
            return
        message_id = len(st.session_state.chat_history)

        answer_str = []

        with st.chat_message("human"):
            st.write(question)

        with st.chat_message("ai", avatar='images/huber.png'):
            response_generator = llm_api_reqeust("localhost", 8080, question)
            self.sources_str = ""
            ret_sources = False

            def chunk_generator():
                nonlocal ret_sources
                for chunk in response_generator:
                    print(chunk.replace("</s>", ""))
                    if chunk.startswith("~data~") or ret_sources:
                        ret_sources = True
                        self.sources_str += chunk
                    else:
                        yield chunk

            st.write_stream(copy_generator(chunk_generator(), answer_str))

            sources = json.loads(self.sources_str[len("~data~"):])
            for source in sources:
                mention(label=source['title'].replace("_", " "), url=source['source'])
            print(''.join(answer_str))

        trace = self.langfuse.trace(
            name="user-interaction",
            input={"question": question},
            version=self.version,
            user_id=str(uuid.uuid4()),
            output={"answer": ''.join(answer_str)},
        )

        for i, source in enumerate(sources):
            trace.update(
                metadata={
                    f"file_name_{i}": source['title'], 
                    f"url_name_{i}": source['source'], 
                    f"score_{i}": source['score']
                }
            )

        st.session_state.chat_history.append({
            "question": question,
            "answer": answer_str,
            "sources": sources,
            "message_id": message_id,
            "trace_id": trace.id,
            "feedback": None
        })

    def fbcb(self, response):
        if st.session_state.chat_history:
            last_entry = st.session_state.chat_history[-1]
            trace_id = last_entry.get("trace_id")
            if trace_id:
                self.langfuse.score(
                    trace_id=trace_id,
                    name="user-feedback",
                    value=1 if response['score'] == 'πŸ‘' else 0,
                    comment=response['text'],
                )
            last_entry['feedback'] = response

        st.session_state.fbk = str(uuid.uuid4())

    def run(self):
        if not st.session_state.question_state:
            with st.chat_message("ai", avatar='images/huber.png'):
                st.write("Hello, how can I help you?")

        self.display_chat_history()

        question = st.chat_input(placeholder="Chat with SICBERT!")
        if question:
            st.session_state.question_state = True

        if st.session_state.question_state:
            self.display_answer(question)
            streamlit_feedback(
                feedback_type="thumbs",
                optional_text_label="Please, provide an explanation.",
                align="flex-start",
                key=st.session_state.fbk,
                on_submit=self.fbcb
            )

if __name__ == "__main__":
    langfuse = Langfuse(
        secret_key="sk-lf-128d982a-27c3-4e71-a916-f01ee4d6f549",
        public_key="pk-lf-4e81f78d-b800-40a7-b89c-c16a3c4ab3ea",
        host="http://localhost:3001"
    )

    StreamlitApp(langfuse).run()