How to capture the feedback effectively

Hi,

I am working on a chatbot project in my organization and I have been stuck on one issue for a couple of days. Below are the requirements for my project.

  • user should ask the question from frontend and get the answer back
  • chat history need to be captured and shown in UI
  • for latest question and answer pair, we need to show the feedback button (thumbs up and down) and need to capture the feedback in the backend.
  • if there is no feedback given by user, the feedback should be captured as N/A

I managed to implement the first 2 points, but I am stuck on the last 2. Since I cannot share my organization's code, I have given example code below. In that code, when I click the feedback button, the screen goes blank.

import streamlit as st
from streamlit_feedback import streamlit_feedback


def display_answer():
    """Render every stored question/answer pair as chat messages."""
    for entry in st.session_state.chat_history:
        with st.chat_message("human"):
            st.write(entry["question"])
        with st.chat_message("ai"):
            st.write(entry["answer"])


def create_answer(question):
    """Append a new Q/A pair, tagged with a sequential message_id, to history."""
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = []

    history = st.session_state.chat_history
    history.append(
        {
            "question": question,
            "answer": f"{question}_Answer",
            "message_id": len(history),
        }
    )


# Demo entry point: this branch runs only on the rerun triggered by submitting
# a new question. NOTE(review): clicking the feedback widget triggers a fresh
# Streamlit rerun in which st.chat_input() returns None, so this whole branch
# is skipped and the screen goes blank — this is the bug described above.
if question := st.chat_input(placeholder="Ask your question here .... !!!!"):
    create_answer(question)
    display_answer()

    # The widget's return value is only truthy once feedback has been submitted.
    if feedback := streamlit_feedback(feedback_type="thumbs", align="flex-start"):
        print(feedback)

I tried it and I cannot solve the issue.

Instead, I just created two simple buttons with emojis. They use callbacks.

Full demo code

import streamlit as st

def display_answer():
    """Show the whole transcript, plus any feedback recorded per message."""
    for entry in st.session_state.chat_history:
        with st.chat_message("human"):
            st.write(entry["question"])
        with st.chat_message("ai"):
            st.write(entry["answer"])
        if "feedback" in entry:
            st.write(f"Feedback: {entry['feedback']}")

def create_answer(question):
    """Record the question together with a stub answer in session history."""
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = []

    next_id = len(st.session_state.chat_history)
    entry = {
        "question": question,
        "answer": f"{question}_Answer",
        "message_id": next_id,
    }
    st.session_state.chat_history.append(entry)

def fbcb(feedback):
    """Button callback: attach the given feedback label to the newest message."""
    if st.session_state.chat_history:
        st.session_state.chat_history[-1]["feedback"] = feedback

# Ensure the history list exists before any widget callback can touch it.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# Runs only on the rerun caused by submitting a question. NOTE(review): on the
# rerun triggered by a button click, chat_input returns None so this branch is
# skipped — but the on_click callback has already stored the feedback by then.
if question := st.chat_input(placeholder="Ask your question here .... !!!!"):
    create_answer(question)
    display_answer()

    # Narrow columns so the two emoji buttons sit side by side.
    cols = st.columns([0.1, 1, 1, 6])
    with cols[1]:
        st.button(':thumbsup:', on_click=fbcb, args=('Positive',), key='thumbsup')
    with cols[2]:
        st.button(':thumbsdown:', on_click=fbcb, args=('Negative',), key='thumbsdown')

I got the solution now.

A key can be supplied to the streamlit_feedback() and this key can access the feedback value.

streamlit_feedback(feedback_type="thumbs", align="flex-start", key='fb_k')

The feedback can be completed in two ways. One, the user has to press the button represented by a thumb. Second if optional text is added, the feedback can be completed by 2 button clicks selecting the thumb and adding text.

This situation makes the chat app a little bit complicated because when the feedback button is clicked, streamlit will rerun the code from top to bottom. Now if there is a text option, it will rerun twice. To minimize this complication, we will use the form, so that there is only 1 exit to capture the feedback.

if question := st.chat_input(placeholder="Ask your question here .... !!!!"):
    create_answer(question)
    display_answer()

    # This one.
    with st.form('form'):
        streamlit_feedback(feedback_type="thumbs", align="flex-start", key='fb_k')
        st.form_submit_button('Save feedback', on_click=fbcb)

We can fill in whatever the feedback widget asks for; once done, press the form button.

We also use the callback function fbcb via on_click=fbcb on the form button, and this is where we update the stored feedback history.

def fbcb():
    message_id = len(st.session_state.chat_history) - 1
    if message_id >= 0:
        st.session_state.chat_history[message_id]["feedback"] = st.session_state.fb_k
    display_answer()

The value of the feedback can be accessed thru the key from the session_state.

st.session_state.fb_k

Sample output

Full code

import streamlit as st
from streamlit_feedback import streamlit_feedback


# Initialise the conversation store once per browser session.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []


def display_answer():
    """Render the transcript; messages without stored feedback show "N/A"."""
    for entry in st.session_state.chat_history:
        with st.chat_message("human"):
            st.write(entry["question"])
        with st.chat_message("ai"):
            st.write(entry["answer"])

        # Requirement: feedback that was never given must read "N/A".
        if "feedback" not in entry:
            st.write("Feedback: N/A")
        else:
            st.write(f"Feedback: {entry['feedback']}")

def create_answer(question):
    """Append the question and a stub answer to the session's chat history."""
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = []

    history = st.session_state.chat_history
    history.append({
        "question": question,
        "answer": f"{question}_Answer",
        "message_id": len(history),
    })


def fbcb():
    """Form-submit callback: persist the feedback widget's value on the latest message.

    The value is read from ``st.session_state.fb_k`` (the key given to
    ``streamlit_feedback``). If the user presses "Save feedback" without
    selecting a thumb, ``fb_k`` is None; we skip storing it so the message
    keeps showing "Feedback: N/A" instead of "Feedback: None".
    """
    feedback = st.session_state.fb_k
    if st.session_state.chat_history and feedback is not None:
        st.session_state.chat_history[-1]["feedback"] = feedback
    # Redraw the transcript inside the callback so the saved feedback is
    # visible immediately, before the automatic rerun.
    display_answer()


# Runs only on the rerun caused by submitting a question; the form below
# batches the feedback interaction so it is delivered in a single submit.
if question := st.chat_input(placeholder="Ask your question here .... !!!!"):
    create_answer(question)
    display_answer()

    # The form gives one exit point: fbcb fires when "Save feedback" is pressed
    # and reads the widget's value via the 'fb_k' session-state key.
    with st.form('form'):
        streamlit_feedback(feedback_type="thumbs", align="flex-start", key='fb_k')
        st.form_submit_button('Save feedback', on_click=fbcb)

Others

The streamlit_feedback function has a parameter called on_submit. This can also be used to get the feedback. I tried to apply it here, but it did not work; it may work in other situations.

Reference

Thank you for provided solution. I was able to add streamlit-feedback into my chatbot app via st.form:

def handle_feedback():
    """Echo the submitted feedback value and confirm receipt with a toast."""
    fb_value = st.session_state.fb_k
    st.write(fb_value)
    st.toast("โœ”๏ธ Feedback received!")

....

        # Wrap the feedback widget in a form so its value is only delivered
        # once, when the "Save feedback" submit button is pressed.
        with st.form('form'):
            streamlit_feedback(feedback_type="thumbs",
                                optional_text_label="[Optional] Please provide an explanation", 
                                align="flex-start", 
                                key='fb_k')
            st.form_submit_button('Save feedback', on_click=handle_feedback)

It works, but there are two problems:

  1. To get it to work, the user first needs to click SUBMIT and only then "Save feedback".
    image
    If the user clicks "Save feedback" first, then st.session_state.fb_k will be None.

  2. Feedback inside st.form does not look very good, and I am looking for ways to get rid of st.form while keeping the same functionality.

Thank you for your help.

Full app code:

from langchain.chat_models import AzureChatOpenAI
from langchain.memory import ConversationBufferWindowMemory # ConversationBufferMemory
from langchain.agents import ConversationalChatAgent, AgentExecutor, AgentType
from langchain.callbacks import StreamlitCallbackHandler
from langchain.memory.chat_message_histories import StreamlitChatMessageHistory
from langchain.agents import Tool
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
import pprint
import streamlit as st
import os
import pandas as pd
from streamlit_feedback import streamlit_feedback

def handle_feedback():
    """Display the feedback captured under session key 'fb_k' and pop a toast."""
    feedback_value = st.session_state.fb_k
    st.write(feedback_value)
    st.toast("โœ”๏ธ Feedback received!")

  
# Azure OpenAI configuration. NOTE(review): the literal `...` (Ellipsis)
# placeholders must be replaced with real string values — os.environ entries
# must be str, so running this as-is would raise a TypeError.
os.environ["OPENAI_API_KEY"] = ...
os.environ["OPENAI_API_TYPE"] = "azure"
os.environ["OPENAI_API_BASE"] = ...
os.environ["OPENAI_API_VERSION"] = "2023-08-01-preview"


@st.cache_data(ttl=72000)  # cache parsed CSVs for 20 hours per distinct input
def load_data_(path):
    """Read a CSV file (path or uploaded file object) into a DataFrame."""
    frame = pd.read_csv(path)
    return frame

# Sidebar file picker; the parsed DataFrame is kept in session state so the
# chat UI below only renders once data has been loaded.
uploaded_file = st.sidebar.file_uploader("Choose a CSV file", type="csv")
if uploaded_file is not None:
    # If a file is uploaded, load the uploaded file
    st.session_state["df"] = load_data_(uploaded_file)


# Main chat UI — only shown once a CSV has been uploaded.
if "df" in st.session_state:

    # Streamlit-backed message store plus a sliding-window memory (last 5 turns).
    msgs = StreamlitChatMessageHistory()
    memory = ConversationBufferWindowMemory(chat_memory=msgs, 
                                            return_messages=True, 
                                            k=5, 
                                            memory_key="chat_history", 
                                            output_key="output")
    # Seed (or reset) the conversation and the saved intermediate steps.
    if len(msgs.messages) == 0 or st.sidebar.button("Reset chat history"):
        msgs.clear()
        msgs.add_ai_message("How can I help you?")
        st.session_state.steps = {}
    avatars = {"human": "user", "ai": "assistant"}
    for idx, msg in enumerate(msgs.messages):
        with st.chat_message(avatars[msg.type]):
            # Render intermediate steps if any were saved
            for step in st.session_state.steps.get(str(idx), []):
                if step[0].tool == "_Exception":
                    continue
                # Insert a status container to display output from long-running tasks.
                with st.status(f"**{step[0].tool}**: {step[0].tool_input}", state="complete"):
                    st.write(step[0].log)
                    st.write(step[1])
            st.write(msg.content)


    if prompt := st.chat_input(placeholder=""):
        st.chat_message("user").write(prompt)

        # Azure GPT-4 with streaming enabled so the callback handler can
        # surface tokens as they arrive.
        llm = AzureChatOpenAI(
                        deployment_name = "gpt-4",
                        model_name = "gpt-4",
                        openai_api_key = os.environ["OPENAI_API_KEY"],
                        openai_api_version = os.environ["OPENAI_API_VERSION"],
                        openai_api_base = os.environ["OPENAI_API_BASE"],
                        temperature = 0, 
                        streaming=True
                        )

        # Pass-through prompt: the tool simply forwards the query to the LLM.
        prompt_ = PromptTemplate(
            input_variables=["query"],
            template="{query}"
        )
        chain_llm = LLMChain(llm=llm, prompt=prompt_)
        tool_llm_node = Tool(
            name='Large Language Model Node',
            func=chain_llm.run,
            description='This tool is useful when you need to answer general purpose queries with a large language model.'
        )

        tools = [tool_llm_node] 
        chat_agent = ConversationalChatAgent.from_llm_and_tools(llm=llm, tools=tools)

        # Agent executor wired to the windowed memory; intermediate steps are
        # returned so they can be replayed in the transcript above.
        executor = AgentExecutor.from_agent_and_tools(
                                                        agent=chat_agent,
                                                        tools=tools,
                                                        memory=memory,
                                                        return_intermediate_steps=True,
                                                        handle_parsing_errors=True,
                                                        verbose=True,
                                                    )
        

        with st.chat_message("assistant"):            
            
            st_cb = StreamlitCallbackHandler(st.container(), expand_new_thoughts=False)
            # NOTE(review): st.session_state['handler'] is never assigned
            # anywhere in this file — this line raises KeyError as written;
            # presumably a handler is set elsewhere or should be removed.
            response = executor(prompt, callbacks=[st_cb, st.session_state['handler']])
            st.write(response["output"])
            st.session_state.steps[str(len(msgs.messages) - 1)] = response["intermediate_steps"]
            response_str = f'{response}'
            pp = pprint.PrettyPrinter(indent=4)
            # NOTE(review): pretty_response is computed but never used below.
            pretty_response = pp.pformat(response_str)
              

        # Feedback form: the value lands in st.session_state.fb_k and is read
        # by handle_feedback when "Save feedback" is pressed.
        with st.form('form'):
            streamlit_feedback(feedback_type="thumbs",
                                optional_text_label="[Optional] Please provide an explanation", 
                                align="flex-start", 
                                key='fb_k')
            st.form_submit_button('Save feedback', on_click=handle_feedback)