I want to develop a basic subscription logic: you get 3 free messages with ChatGPT. I couldn't figure out from the docs how the message history is carried in the UI. What I'm trying to do:

- accept the prompt from `chat_input` and show the response
- carry the message history in the UI and in `st.session_state["messages"]`
- limit each session to 3 free responses (see the sketches after the code)
import streamlit as st
import openai  # the code below uses the pre-1.0 openai interface (openai.ChatCompletion)
import uuid
# Seed conversation: a system prompt plus a few jargon-translation examples
messages_ls = [
    {"role": "system", "content": "You are a helpful, pattern-following assistant."},
    {"role": "user", "content": "Help me translate the following corporate jargon into plain English."},
    {"role": "assistant", "content": "Sure, I'd be happy to!"},
    {"role": "user", "content": "New synergies will help drive top-line growth."},
    {"role": "assistant", "content": "Things working well together will increase revenue."},
    {"role": "user", "content": "Let's circle back when we have more bandwidth to touch base on opportunities for increased leverage."},
    {"role": "assistant", "content": "Let's talk later when we're less busy about how to do better."},
    {"role": "user", "content": "This late pivot means we don't have time to boil the ocean for the client deliverable."},
]
# NOTE: don't hard-code the key in a real app; load it from st.secrets or an env var
openai.api_key = 'sk-I3xxxxxxhdfeh34l4gj4r4jffifen'
st.title("ChatGPT-like clone")
if "openai_model" not in st.session_state:
st.session_state["openai_model"] = "gpt-3.5-turbo"
# d={} , st.session_state
if "messages" not in st.session_state:
st.session_state["messages"] = messages_ls
# Create a user id for the session and keep it in session_state
# so it isn't regenerated on every rerun
if "user_id" not in st.session_state:
    st.session_state["user_id"] = uuid.uuid4().hex
user_id = st.session_state["user_id"]
# When the user submits a prompt: store it, call the API, and show the reply
# (the 3-free-responses cap is sketched further down)
if prompt := st.chat_input(" "):
    st.session_state["messages"].append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)
    with st.chat_message("assistant"):
        response = openai.ChatCompletion.create(
            model=st.session_state["openai_model"],
            messages=st.session_state["messages"],
            temperature=0.7,
        )
        reply = response["choices"][0]["message"]["content"]
        st.markdown(reply)
    st.session_state["messages"].append({"role": "assistant", "content": reply})