I struggle with chat_message positioning.
I have two forms that should feed a bot conversation. I don’t want to use st.chat_input because I want to send semi-structured information to the OpenAI GPT.
My problem is that when I add messages after the form is submitted, the new chat_message elements appear after my forms. I would like them to appear in the chat container instead.
Any ideas on how to position my messages correctly? Should I use a container, or declare the chat box before the forms?
if "openai_model" not in st.session_state:
st.session_state["openai_model"] = "gpt-4"
if "messages" not in st.session_state:
st.session_state.messages = []
st.session_state.messages.append({"role": "assistant", "content": "This is my first message ! ☀️"})
for message in st.session_state.messages:
chatBox = st.chat_message(message["role"])
chatBox.markdown(message["content"])
def sendBotMessage(msg, prompt):
    """Append the user's message, stream the assistant reply, and store it.

    msg: raw text collected from one of the forms.
    prompt: prefix prepended to every history entry sent to the API
        (this is how the semi-structured context reaches the model).
    """
    st.session_state.messages.append({"role": "user", "content": msg})
    # These chat_message calls run at the point in the script where the form
    # was submitted, i.e. *after* the forms — hence the misplaced bubbles.
    with st.chat_message("user"):
        st.markdown(msg)
    with st.chat_message("assistant"):
        message_placeholder = st.empty()
        full_response = ""
        # Stream the completion token-by-token, redrawing the placeholder.
        for response in openai.ChatCompletion.create(
            model=st.session_state["openai_model"],
            messages=[
                {"role": m["role"], "content": prompt + m["content"]}
                for m in st.session_state.messages
            ],
            stream=True,
        ):
            full_response += response.choices[0].delta.get("content", "")
            message_placeholder.markdown(full_response + "▌")  # typing cursor
        message_placeholder.markdown(full_response)
    st.session_state.messages.append({"role": "assistant", "content": full_response})
# Input area: two side-by-side forms, each sending its text with a different
# prompt prefix.
# NOTE(review): promptGPT1 and promptGPT2 are not defined anywhere in this
# snippet — presumably defined earlier in the real file; verify.
st.divider()
with st.container():
    col1, col2 = st.columns(2)
    with col1:
        st.write("Form 1")
        with st.form('survey'):
            questions = st.text_area('Input your questions')
            submit = st.form_submit_button('Send 1')
            if submit:
                sendBotMessage(questions, promptGPT1)
    with col2:
        st.write("Form 2")
        with st.form('surveyTheme'):
            surveyToCreate = st.text_area('Blabla')
            submitForm2 = st.form_submit_button('Send 2')
            if submitForm2:
                sendBotMessage(surveyToCreate, promptGPT2)
This might not be the exact right behavior, but the basic idea should work — you can create a container for holding the chat responses, and always use that container when writing messages, like this:
import streamlit as st
import openai
if "openai_model" not in st.session_state:
st.session_state["openai_model"] = "gpt-4"
if "messages" not in st.session_state:
st.session_state.messages = []
st.session_state.messages.append(
{"role": "assistant", "content": "This is my first message ! ☀️"}
)
promptGPT1 = "This is a prompt"
promptGPT2 = "This is a different prompt"
chat_container = st.empty()
for message in st.session_state.messages:
with chat_container.container():
chatBox = st.chat_message(message["role"])
chatBox.markdown(message["content"])
def sendBotMessage(msg, prompt):
    """Record the user's message, stream the GPT reply into the shared chat
    container, and persist the finished reply in session state.

    msg: raw text collected from one of the forms.
    prompt: prefix prepended to every history entry sent to the API.
    """
    st.session_state.messages.append({"role": "user", "content": msg})
    # Everything is drawn inside chat_container so the bubbles appear in the
    # chat area declared above the forms, not after them.
    with chat_container.container():
        with st.chat_message("user"):
            st.markdown(msg)
        with st.chat_message("assistant"):
            slot = st.empty()
            reply = ""
            # Build the prompt-prefixed history once, then stream the reply.
            history = [
                {"role": entry["role"], "content": prompt + entry["content"]}
                for entry in st.session_state.messages
            ]
            stream = openai.ChatCompletion.create(
                model=st.session_state["openai_model"],
                messages=history,
                stream=True,
            )
            for chunk in stream:
                reply += chunk.choices[0].delta.get("content", "")
                slot.markdown(reply + "▌")  # typing cursor while streaming
            slot.markdown(reply)
            st.session_state.messages.append(
                {"role": "assistant", "content": reply}
            )
# Input area: two side-by-side forms, each sending its text with a different
# prompt prefix via sendBotMessage.
st.divider()
with st.container():
    col1, col2 = st.columns(2)
    with col1:
        st.write("Form 1")
        with st.form("survey"):
            questions = st.text_area("Input your questions")
            submit = st.form_submit_button("Send 1")
            if submit:
                sendBotMessage(questions, promptGPT1)
    with col2:
        st.write("Form 2")
        with st.form("surveyTheme"):
            surveyToCreate = st.text_area("Blabla")
            submitForm2 = st.form_submit_button("Send 2")
            if submitForm2:
                sendBotMessage(surveyToCreate, promptGPT2)
File "C:\Users\nick\streamlit_chat_openai\app.py", line 142, in <module>
full_response += response.choices[0].delta.get("content", "")
~~~~~~~~~~~~~~~~^^^
IndexError: list index out of range
My guess is that Azure OpenAI has a different response format — I would try debugging by doing st.write(response) or st.help(response) to see more about what the "response" object contains.