Combining Container and Chat

Hi!

I have open-sourced a humble UI improvement for the st.chat elements: https://github.com/pierrelouisbescond/streamlit-chat-ui-improvement

I want to reposition the chat options I have implemented so they sit just above the st.chat_input.
I was thinking of using containers, but all my tests have failed so far…

So I’m looking for some help to achieve this :sweat_smile:
Thanks!
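To illustrate what I mean, here is a hypothetical, simplified sketch of the kind of container-based attempt I tried (not my actual code): the container renders in the normal scrolling flow, so the buttons end up with the chat history instead of staying pinned just above st.chat_input.

import streamlit as st

# Hypothetical naive attempt: place the option buttons in a container
# created right before st.chat_input
options = st.container()
col1, col2 = options.columns(2)
col1.button("Clear Chat 🧹")
col2.button("🔁")

# st.chat_input is pinned to the bottom of the app, but the container
# above is part of the normal page flow, so the buttons scroll away with
# the chat history instead of staying anchored above the input
prompt = st.chat_input("How can I help you?")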

Original code as on GitHub:

import json
import logging
import streamlit as st  # 1.34.0
import time
import tiktoken

from datetime import datetime

# from openai import OpenAI  # 1.30.1
from openai import AzureOpenAI  # 1.30.1

logger = logging.getLogger()
logging.basicConfig(encoding="UTF-8", level=logging.INFO)

st.set_page_config(page_title="Streamlit Chat Interface Improvement",
                   page_icon="🤩")

st.title("🤩 Improved Streamlit Chat UI")

# Secrets to be stored in /.streamlit/secrets.toml
# OPENAI_API_ENDPOINT = "https://xxx.openai.azure.com/"
# OPENAI_API_KEY = "efpgishhn2kwlnk9928avd6vrh28wkdj" (this is a fake key 😉)

# To be used with standard OpenAI API
# client = OpenAI(api_key=st.secrets["OPENAI_API_KEY"])

# To be used with standard Azure OpenAI API
client = AzureOpenAI(
    azure_endpoint=st.secrets["OPENAI_API_ENDPOINT"],
    api_key=st.secrets["OPENAI_API_KEY"],
    api_version="2024-02-15-preview",
)


# This function logs the last question and answer in the chat messages
def log_feedback(icon):
    # We display a nice toast
    st.toast("Thanks for your feedback!", icon="👌")

    # We retrieve the last question and answer
    last_messages = json.dumps(st.session_state["messages"][-2:])

    # We record the timestamp
    activity = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ": "

    # And include the messages
    activity += "positive" if icon == "👍" else "negative"
    activity += ": " + last_messages

    # And log everything
    logger.info(activity)


# Model Choice - Name to be adapted to your deployment
if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = "gpt-35-turbo"

# Adapted from https://docs.streamlit.io/develop/tutorials/llms/build-conversational-apps
if "messages" not in st.session_state:
    st.session_state["messages"] = []

user_avatar = "👩‍💻"
assistant_avatar = "🤖"

# In case of rerun of the last question, we remove the last answer from st.session_state["messages"]
if "rerun" in st.session_state and st.session_state["rerun"]:

    st.session_state["messages"].pop(-1)

# We rebuild the previous conversation stored in st.session_state["messages"] with the corresponding emojis
for message in st.session_state["messages"]:
    with st.chat_message(
        message["role"],
        avatar=assistant_avatar if message["role"] == "assistant" else user_avatar,
    ):
        st.markdown(message["content"])

# A chat input will add the corresponding prompt to the st.session_state["messages"]
if prompt := st.chat_input("How can I help you?"):

    st.session_state["messages"].append({"role": "user", "content": prompt})

    # and display it in the chat history
    with st.chat_message("user", avatar=user_avatar):
        st.markdown(prompt)

# If the prompt is initialized or if the user is asking for a rerun, we
# launch the chat completion by the LLM
if prompt or ("rerun" in st.session_state and st.session_state["rerun"]):

    with st.chat_message("assistant", avatar=assistant_avatar):
        stream = client.chat.completions.create(
            model=st.session_state["openai_model"],
            messages=[
                {"role": m["role"], "content": m["content"]}
                for m in st.session_state["messages"]
            ],
            stream=True,
            max_tokens=300,  # Limited to 300 tokens for demo purposes
        )
        response = st.write_stream(stream)
    st.session_state["messages"].append({"role": "assistant", "content": response})

    # In case this is a rerun, we set the "rerun" state back to False
    if "rerun" in st.session_state and st.session_state["rerun"]:
        st.session_state["rerun"] = False

# If there is at least one message in the chat, we display the options
if len(st.session_state["messages"]) > 0:

    # We set the spacing between the icons using column widths that sum to 100
    cols_dimensions = [7, 19.4, 19.3, 9, 8.6, 8.6, 28.1]
    col0, col1, col2, col3, col4, col5, col6 = st.columns(cols_dimensions)

    with col1:

        # Convert the list of messages into JSON-encoded bytes
        json_messages = json.dumps(st.session_state["messages"]).encode("utf-8")

        # And the corresponding Download button
        st.download_button(
            label="📥 Save chat!",
            data=json_messages,
            file_name="chat_conversation.json",
            mime="application/json",
        )

    with col2:

        # We reset the messages list and rerun the app
        # (this part could probably be improved with the cache option)
        if st.button("Clear Chat 🧹"):
            st.session_state["messages"] = []
            st.rerun()

    with col3:
        icon = "🔁"
        if st.button(icon):
            st.session_state["rerun"] = True
            st.rerun()

    with col4:
        icon = "👍"

        # The button will trigger the logging function
        if st.button(icon):
            log_feedback(icon)

    with col5:
        icon = "👎"

        # The button will trigger the logging function
        if st.button(icon):
            log_feedback(icon)

    with col6:

        # We initialize a tokenizer
        enc = tiktoken.get_encoding("cl100k_base")

        # We encode the messages
        tokenized_full_text = enc.encode(
            " ".join([item["content"] for item in st.session_state["messages"]])
        )

        # And display the corresponding number of tokens
        label = f"💬 {len(tokenized_full_text)} tokens"
        st.link_button(label, "https://platform.openai.com/tokenizer")

else:

    # At the first run of a session, we temporarily display a message
    if "disclaimer" not in st.session_state:
        with st.empty():
            for seconds in range(3):
                st.warning(
                    "β€Ž You can click on πŸ‘ or πŸ‘Ž to provide feedback regarding the quality of responses.",
                    icon="πŸ’‘",
                )
                time.sleep(1)
            st.write("")
            st.session_state["disclaimer"] = True

Maybe the streamlit-float package can help you
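For anyone finding this later, here is a minimal sketch of how the package is typically used, based on its README (float_init() and the .float() CSS string are the package's API as I understand it; the container contents here are just placeholders):

import streamlit as st
from streamlit_float import *  # adds a .float() method to Streamlit containers

# Inject the CSS/JS the float feature relies on (assumed to be required,
# as described in the package README)
float_init()

# Whatever is placed in this container...
footer = st.container()
with footer:
    st.button("I stay near the bottom of the page")

# ...gets floated with a plain CSS string
footer.float("bottom: 1rem;")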


Hi @pierrelouisbescond

I am the author of the package @Odrec recommended. His comment caught my attention and since you provided your code, I thought I would try my hand at a quick solution.

I am happy to say that I was able to do it by adding only 3 lines to your code and changing one line!

...
from datetime import datetime
+from streamlit_float import *

# If there is at least one message in the chat, we display the options
if len(st.session_state["messages"]) > 0:

+   action_buttons_container = st.container()
+   action_buttons_container.float("bottom: 6.8rem;")
    
    # We set the space between the icons thanks to a share of 100
    cols_dimensions = [7, 19.4, 19.3, 9, 8.6, 8.6, 28.1]
-   col0, col1, col2, col3, col4, col5, col6 = st.columns(cols_dimensions)
+   col0, col1, col2, col3, col4, col5, col6 = action_buttons_container.columns(cols_dimensions)

One nitpicky thing for me is the spacing between the chat area and the buttons. My solution to that would be to add a little CSS to increase the bottom margin of the chat area. To make the spacing better, I would change:

-   action_buttons_container.float("bottom: 6.8rem;")
+   action_buttons_container.float("bottom: 6.9rem;background-color: var(--default-backgroundColor); padding-top: 1rem;")

and add an empty st.write after the chat messages (not the most elegant, but simpler than adding more CSS). I put it right before the if statement.


+st.write("")
# If there is at least one message in the chat, we display the options
if len(st.session_state["messages"]) > 0:
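For completeness, the "little CSS" route mentioned above could look roughly like this. It is only a sketch: the data-testid="stChatMessage" selector is an assumption about Streamlit's current markup, not something taken from the code above.

# Hypothetical CSS-only alternative to the extra st.write(""):
# give the last chat message extra bottom margin so it clears the
# floating action buttons
st.markdown(
    """
    <style>
    /* selector is an assumption about Streamlit's DOM; adjust if it changes */
    div[data-testid="stChatMessage"]:last-of-type {
        margin-bottom: 3rem;
    }
    </style>
    """,
    unsafe_allow_html=True,
)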

Hi @bouzidanas,
This is awesome and I find the result already perfect:
(animated GIF demo: 20240601-streamlit-chat-improvement)

I updated my GitHub project and credited you :grinning: :pray:
Thanks a lot!

