How to prevent auto scrolling to the bottom

I am using Streamlit version 1.40. How can I prevent the Streamlit interface from automatically scrolling to the bottom while new chat information is being written?

It keeps scrolling to the bottom, and I want to be able to control the scrolling myself rather than have it automatically jump to the bottom.

Adding code for reference

def general_chat(user_input):
    """Stream an LLM reply to *user_input* into the chat UI.

    Builds a prompt from the session chat history plus blocked-topic
    instructions, streams the model's reply chunk-by-chunk into a
    placeholder, and appends the final reply to
    ``st.session_state.chat_history``.

    Args:
        user_input: The user's latest chat message.
    """
    response_placeholder = st.empty()
    streamed_response = ""
    try:
        # Explicitly define what is off-limits for the model.
        blocked_topics = "\n".join(
            f"Do not answer any questions about: {keyword}" for keyword in key_data
        )

        history_context = "\n".join(
            f"{chat['role']}: {chat['message']}" for chat in st.session_state.chat_history
        )

        prompt_input = (
            f"{history_context}\n"
            f"Instructions:\n"
            f"{blocked_topics}\n"
            f"User: {user_input}\n"
            f"Assistant:"
        )

        for chunk in llm.stream(prompt_input):  # Adjust to your LLM's streaming API
            streamed_response += chunk
            response_placeholder.write(streamed_response)
            time.sleep(0.1)  # throttle redraws so the stream is readable

        st.session_state.chat_history.append(
            {"role": "assistant", "message": streamed_response.strip()}
        )
    except Exception as e:
        # Show a generic message to the user, but keep the full traceback
        # in the log (logger.info(str(e)) dropped the traceback and logged
        # a failure at info severity).
        st.session_state.chat_history.append(
            {"role": "assistant", "message": "Server busy. Please try again later."}
        )
        logger.exception("LLM streaming failed: %s", e)
def chat_page():
    """Render the chat page: styling, header, chat history, and input handling."""
    # Right-align user chat bubbles.
    # NOTE(review): `.st-emotion-cache-1c7y2kd` is a generated, version-specific
    # class name and may break on a Streamlit upgrade — confirm after updates.
    st.markdown(
        """
<style>
            .st-emotion-cache-1c7y2kd {
                flex-direction: row-reverse;
                text-align: right;
            }
</style>
        """,
        unsafe_allow_html=True,
    )
    # hide_streamlit_toolbar()
    col1, col2 = st.columns([6, 2])
    with col1:
        st.subheader('ThinkBot')
        st.write('Welcome to ThinkBot! I am an AI Chatbot. Choose "General" for general inquiries or '
                 'select a specific project for project-related questions.')

    # Initialize chat history in session state if not present.
    if 'chat_history' not in st.session_state:
        st.session_state.chat_history = [
            {"role": "assistant", "message": "How can I help you?"}
        ]

    # Display chat history with role-specific avatars.
    # (Removed unused locals: `message_key`/`idx` were computed but never used,
    # and the `image_to_base64(...)` results were never referenced — the
    # avatars below take the image file paths directly.)
    for chat in st.session_state.chat_history:
        if chat["role"] == "user":
            with st.chat_message("user", avatar="images/chat_profile-me.png"):
                st.write(chat['message'])
        else:
            with st.chat_message("assistant", avatar="images/chat_profile-bot.png"):
                st.write(chat['message'])

    # Collect user input; rerun so the user's message renders before processing.
    if user_input := st.chat_input(placeholder="Your Message"):
        st.session_state.chat_history.append({"role": "user", "message": user_input})
        st.session_state.process_input = True
        st.rerun()

    # Process input flagged by the previous run, then rerun to show the reply.
    if st.session_state.get("process_input"):
        with st.spinner('Processing...'):
            general_chat(st.session_state.chat_history[-1]["message"])
            st.session_state.process_input = False
            st.rerun()

In my code example, the chat window does not automatically scroll to the bottom. If you scroll up, it stays at that position even after a new message is sent.

import streamlit as st

# Make sure the message list survives Streamlit reruns.
if 'chat_messages' not in st.session_state:
    st.session_state['chat_messages'] = []

# Replay the stored conversation on every rerun.
for entry in st.session_state.chat_messages:
    with st.chat_message(entry['role']):
        st.markdown(entry['content'])

if user_text := st.chat_input("Type your message here..."):
    # Record and render the user's message immediately.
    st.session_state.chat_messages.append({"role": "user", "content": user_text})
    with st.chat_message("user"):
        st.markdown(user_text)

    # Canned reply standing in for a real model call.
    bot_reply = "This is a demo response"
    st.session_state.chat_messages.append({"role": "assistant", "content": bot_reply})
    with st.chat_message("assistant"):
        st.markdown(bot_reply)

I stream the response from the LLM and display it with st.write(). As the answer from the LLM grows, the scroll bar moves downward. If I scroll upwards, the view stays static; but if I do nothing and just wait for the answer, the scroll bar keeps moving downward, showing the latest output.