Problem with input variables displaying on streamlit

Tried a few different things and plugged my code into GPT-4 to try to fix. Not sure where I’m going wrong here?

import streamlit as st
import os
from langchain_core.messages import HumanMessage, AIMessage
from langchain_core.output_parsers import StrOutputParser
from apikey import openai_api_key
from langchain_core.prompts import ChatPromptTemplate
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_openai import OpenAIEmbeddings
from langchain.chains import LLMChain
from langchain_openai import ChatOpenAI
from langchain_core.documents import Document
from langchain_community.document_loaders import PyPDFLoader
from langchain.chains import create_retrieval_chain 
from langchain.memory import ConversationBufferWindowMemory
from langchain.chains import create_history_aware_retriever
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain.chains import ConversationChain
from langchain_community.vectorstores import FAISS

os.environ['OPENAI_API_KEY']=openai_api_key

if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

st.set_page_config(page_title="bot")

st.title("bot")

# Doc loader
loader = PyPDFLoader("/Users/neilmcdevitt/VSCode Projects/Cashvertising-Free-PDF-Book.pdf")
docs = loader.load_and_split()

# Embeddings
embeddings = OpenAIEmbeddings()
db = FAISS.from_documents(docs, embeddings)

text_splitter = RecursiveCharacterTextSplitter()
documents = text_splitter.split_documents(docs)
vectorstore = FAISS.from_documents(documents, embeddings)



def retrieve_info(query):
   similar_response = db.similarity_search(query, k=3)
   page_contents_array = [doc.page_content for doc in similar_response]
   print(page_contents_array)
   return page_contents_array



# LLM model and memory
llm = ChatOpenAI(temperature=0, model="gpt-4-turbo-preview", max_tokens=450)
window_memory = ConversationBufferWindowMemory(k=5)
conversation_with_summary = ConversationChain(
   llm=llm, verbose=True, memory=window_memory
)

# Get response
def get_response(user_question, chat_history):
  template = f"""
        You're a marketing and business expert that helps users grow revenue for their subscription
        revenue community business.
        Your specialties are paid advertising and organic marketing on social media, integrating 
        feedback loops to improve product and improve marketing efficiencies, and reducing membership churn.
        Based on the User question, the Chat history, and the PDF document provided give your best response 
        in natural language.

        Chat history: {chat_history}
        User question: {user_question}
        
        """
  try:
      llm = ChatOpenAI()
  except Exception as err:
      raise Exception(f'2: {err}')

  try:
      prompt = ChatPromptTemplate.from_template(template)
      document_chain = create_stuff_documents_chain(llm, prompt)
  except Exception as err:
      raise Exception(f'1: {err}')
    
  parser = StrOutputParser()  
  chain = prompt | llm | parser

  retriever = vectorstore.as_retriever()
  retrieval_chain = create_retrieval_chain(retriever, document_chain)

  try:
        gr = retrieval_chain.invoke({
            "chat_history": chat_history,
            "user_question": user_question,
        })
  except Exception as err:
        raise Exception(f'3: {err}')

  return gr



# Conversation
for message in st.session_state.chat_history:
    if isinstance(message, HumanMessage):
        with st.chat_message("Human"):
            st.markdown(message.content)
    else:
        with st.chat_message("AI"):
            st.markdown(message.content)
            
user_query = st.chat_input("message here")
if user_query is not None and user_query != "":
     st.session_state.chat_history.append(HumanMessage(user_query))
    
     with st.chat_message("Human"):
          st.markdown(user_query)
        
     with st.chat_message("AI"):
         ai_response = get_response(user_query, st.session_state.chat_history)
         st.markdown(ai_response)
        
     st.session_state.chat_history.append(AIMessage(ai_response))

This topic was automatically closed 180 days after the last reply. New replies are no longer allowed.