Trying to pickle the embeddings and generate an answer, but the answer generation is not working properly

Hi, I am working on a chatbot, and the answer it generates looks wrong.

I have tried to pickle the embeddings; my code is as follows:

# Cache files: embeddings wrapper and the already-split documents.
# NOTE(review): despite its name, vectorStore.pkl holds the split documents,
# not the vector store — a Chroma instance cannot be pickled.
file_path = "vectorStore.pkl"
pdf_file_path = "legal_women.pdf"
embedding_file_path = "embeddings.pkl"

# Load the cached embeddings wrapper if present; otherwise create and cache it.
# (HuggingFaceEmbeddings is a light config object — the actual model weights
# are cached separately by the huggingface hub.)
if os.path.exists(embedding_file_path):
    with open(embedding_file_path, "rb") as f:
        embeddings = pickle.load(f)
else:
    embeddings = HuggingFaceEmbeddings()
    with open(embedding_file_path, "wb") as f:
        pickle.dump(embeddings, f)

# Split on newlines into ~1000-character chunks with 200-character overlap.
text_splitter = CharacterTextSplitter(
    separator="\n",
    chunk_size=1000,
    chunk_overlap=200,
)

# Reuse the previously split documents when available so the PDF is parsed
# only once; the original code re-parsed the PDF on every run and the pickle
# was write-only.
if os.path.exists(file_path):
    with open(file_path, "rb") as f:
        docs = pickle.load(f)
else:
    loader = PDFPlumberLoader(pdf_file_path)
    pages = loader.load()
    docs = text_splitter.split_documents(pages)
    with open(file_path, "wb") as f:
        pickle.dump(docs, f)

# Build the Chroma index in memory each run. To cache the index itself on
# disk, pass persist_directory="..." here instead of pickling anything.
db = Chroma.from_documents(docs, embeddings)

# RAG prompt template from the LangChain hub.
prompt = hub.pull("rlm/rag-prompt", api_url="https://api.hub.langchain.com")


def model(user_query, max_length, temp):
    """Answer ``user_query`` with RetrievalQA over the module-level Chroma ``db``.

    Parameters:
        user_query: the question to answer.
        max_length: generation length limit passed to the HF endpoint.
        temp: sampling temperature passed to the HF endpoint.

    Returns:
        The generated answer string (the chain's ``"result"`` field).
    """
    # NOTE(review): the original code un-pickled vectorStore.pkl (which holds
    # a list of split documents, not a vector store) into a variable that was
    # never used — the retriever always came from the global `db`. The dead
    # load is removed.
    repo_id = 'mistralai/Mistral-7B-Instruct-v0.2'
    llm = HuggingFaceHub(
        repo_id=repo_id,
        model_kwargs={"max_length": max_length, "temperature": temp},
    )
    qa = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        # `k` must be passed inside search_kwargs; as_retriever(k=2) silently
        # ignores it and falls back to the default k.
        retriever=db.as_retriever(search_kwargs={"k": 2}),
        return_source_documents=True,
        verbose=True,
        chain_type_kwargs={"prompt": prompt},
    )
    return qa(user_query)["result"]

Can you help me find the correct way to generate the answer?