I'm trying to use LangChain with the just-launched chat elements. I followed the example Streamlit posted and adapted it to go through LangChain instead of calling OpenAI directly. However, the memory is not working, even though I'm using session state to save the conversation. Below is my code, followed by a minimal sketch of the session-state pattern I'm trying to follow.
Code Snippet:
from langchain import OpenAI
from langchain.callbacks import get_openai_callback
from langchain.chains import ConversationChain
from langchain.chains.conversation.memory import ConversationSummaryMemory
from langchain.memory import ConversationBufferMemory
from langchain.memory import ChatMessageHistory
import streamlit.components.v1 as components
import openai
import streamlit as st
import os
st.title("bot")
openai.api_key = "OpenAI Key"
if "openai_model" not in st.session_state:
    llm = OpenAI(
        temperature=0,
        openai_api_key="xxxxxxxxxxxxxxxxxxxxx",
        model_name="text-davinci-003",
    )
    st.session_state.conversation = ConversationChain(
        llm=llm,
        memory=ConversationBufferMemory(llm=llm),
    )
if "messages" not in st.session_state:
    st.session_state.messages = []
st.write(st.session_state.conversation)
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
if prompt := st.chat_input("What is up?"):
st.session_state.messages.append({"role": "user", "content": prompt})
with st.chat_message("user"):
st.markdown(prompt)
with st.chat_message("assistant"):
llm_response = st.session_state.conversation.run(
prompt,
)
st.markdown(llm_response)
st.session_state.messages.append({"role": "assistant", "content": llm_response})