import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer
# PeftModel lives in the `peft` package, not `transformers`.
from peft import PeftModel
# Base model to load the LoRA adapter on top of (replace if different).
base_model_name = "NousResearch/Llama-2-7b-chat-hf"

# Load the base model and its tokenizer. The tokenizer is required both to
# encode user input and to decode the token ids returned by generate().
base_model = AutoModelForCausalLM.from_pretrained(base_model_name)
tokenizer = AutoTokenizer.from_pretrained(base_model_name)

# NOTE(review): PeftModel.from_pretrained expects a *directory* containing
# adapter_config.json and adapter_model.bin — not the .bin file itself.
# Point this at the folder that holds your adapter files.
adapter_path = "adapter_model"
model = PeftModel.from_pretrained(base_model, adapter_path)
Define Streamlit app
def main():
    """Streamlit UI: read a user message and display the model's reply."""
    st.title("Mental Health Chatbot")
    st.write("Welcome to the Mental Health Chatbot. Feel free to talk about your feelings and concerns.")

    # Input text area for user input.
    user_input = st.text_input("You:", "")

    if st.button("Ask"):
        if user_input.strip() != "":
            st.write("Bot:")
            with st.spinner("Thinking…"):
                # model.generate returns a tensor of token ids, NOT a
                # pipeline-style list of dicts — indexing it with
                # [0]["generated_text"] raises an error. Encode the prompt,
                # generate, then decode the ids back to text explicitly.
                input_ids = tokenizer(user_input, return_tensors="pt")["input_ids"]
                output_ids = model.generate(input_ids=input_ids, max_length=100)
                generated_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
            st.write(generated_text)
        else:
            st.warning("Please enter your message.")
# Script entry point — the guard needs the dunder spellings (__name__ /
# "__main__"); bare `name` is an undefined variable.
if __name__ == "__main__":
    main()