I’m trying to get a LlamaIndex ReActAgent working with Streamlit, but the agent’s memory/chat history doesn’t seem to persist between turns when I call it inside the Streamlit context blocks.
ReAct Agent Docs
Here’s a bare-bones example:
import os

import streamlit as st
from llama_index.core.agent import ReActAgent
from llama_index.core.tools import FunctionTool
from llama_index.llms.bedrock_converse import BedrockConverse


def multiply(a: int, b: int) -> int:
    """Multiply two integers and return the result integer."""
    return a * b


def add(a: int, b: int) -> int:
    """Add two integers and return the result integer."""
    return a + b


multiply_tool = FunctionTool.from_defaults(fn=multiply)
add_tool = FunctionTool.from_defaults(fn=add)

st.title("Agent Dingus")

llm = BedrockConverse(
    model="anthropic.claude-3-5-sonnet-20240620-v1:0",
    aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
    aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
    region_name="us-east-1",
)

tools = [add_tool, multiply_tool]
agent = ReActAgent.from_tools(tools=tools, llm=llm, verbose=True)

with st.sidebar:
    st.title("Agent Dingus")

# Keep the rendered transcript in session state and replay it on each rerun.
if "messages" not in st.session_state:
    st.session_state.messages = []

for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("What is up?"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)
    with st.chat_message("assistant"):
        message_placeholder = st.empty()
        with st.spinner():
            response = agent.chat(prompt)
            st.markdown(response)
    st.session_state.messages.append({"role": "assistant", "content": response})