How to prevent st.file_uploader from rerunning the whole script after uploading a file

Summary

I have 6 st.expanders in the page, and each of them contains a st.file_uploader and a spinner. When I upload file A and it is validated, I get a green box saying the file was uploaded. However, when I run the next upload for file B, it reruns the upload/validator on file A, which had already been validated.

Steps to reproduce

def upload_csv_to_bucket(label, bucket_name):
    """Render one expander with a CSV uploader: push the chosen file to the
    given GCS bucket, then poll the bucket for a generated error log.

    NOTE(review): Streamlit reruns the whole script top-to-bottom on every
    widget interaction, so once a file sits in an uploader this body runs
    again on each rerun — that is the re-validation behavior described above.
    """
    with st.expander(f"Upload {label} CSV Files"):
        uploaded_file = st.file_uploader(f"Upload {label} CSV", type=["csv"])
        if uploaded_file is None:
            return

        try:
            # Connect to the specific bucket for this data set.
            bucket = client.get_bucket(bucket_name)
            # Save the uploaded file to the bucket.
            bucket.blob(uploaded_file.name).upload_from_file(uploaded_file)
        except Exception as e:
            st.error("An error occurred while uploading the file: " + str(e))
            # BUG FIX: the original fell through and used the (possibly
            # undefined) bucket variable after a failed connect/upload.
            return

        # The validator writes "<name>_error_log.xlsx" next to the upload.
        error_log_name = os.path.splitext(uploaded_file.name)[0] + "_error_log.xlsx"

        with st.spinner("Checking for error log..."):
            time.sleep(15)  # give the validator time to produce the log
            # Look for the error log among the bucket's blobs.
            error_log = next(
                (b for b in bucket.list_blobs() if error_log_name in b.name),
                None,
            )
            if error_log:
                error_log_df = pd.read_excel(error_log.download_as_string())
                st.dataframe(error_log_df)
                error_log.delete()
            else:
                st.success("File uploaded successfully!")


# A upload
upload_csv_to_bucket("A", "A")

# B upload
upload_csv_to_bucket("B", "B")

The code should upload the file and validate without rerunning the previous uploaded/validated file.

I tried adding caching to the upload, but the problem persists.

# Variables for A
bucket_name_A = "A"
A_bucket = client.get_bucket(bucket_name_A)


# Cache so the upload is skipped when the same file is passed again on a
# rerun. NOTE(review): st.cache is deprecated on modern Streamlit — prefer
# st.cache_data / st.cache_resource. Caching only memoizes this function's
# return value; it does not stop the script itself from rerunning.
@st.cache(suppress_st_warning=True, show_spinner=False)
def upload_file_A(file):
    """Upload *file* to the A bucket and return its name."""
    deals_blob = A_bucket.blob(file.name)
    deals_blob.upload_from_file(file)
    return file.name


# A upload
with st.expander("Upload A CSV Files"):
    uploaded_file = st.file_uploader("Upload A CSV", type=["csv"])
    if uploaded_file is not None:
        try:
            file_name = upload_file_A(uploaded_file)
        except Exception as e:
            st.error(
                "An error occurred while uploading the file: " + str(e))

        # The validator writes "<name>_error_log.xlsx" next to the upload.
        error_log_name = os.path.splitext(uploaded_file.name)[
            0] + "_error_log.xlsx"

        with st.spinner("Checking for error log..."):
            time.sleep(15)  # give the validator time to produce the log
            error_log = None
            # BUG FIX: the original iterated `deals_bucket_deals`, which is
            # never defined in this snippet; the A bucket is `A_bucket`.
            for blob in A_bucket.list_blobs():
                if error_log_name in blob.name:
                    error_log = blob
                    break
            if error_log:
                error_log_df = pd.read_excel(error_log.download_as_string())
                st.dataframe(error_log_df)
                error_log.delete()
            else:
                st.success("File uploaded successfully!")

# Variables for B
bucket_name_B = "B"
B_bucket = client.get_bucket(bucket_name_B)


# Cache so the upload is skipped when the same file is passed again on a
# rerun. NOTE(review): st.cache is deprecated on modern Streamlit — prefer
# st.cache_data / st.cache_resource. Caching only memoizes this function's
# return value; it does not stop the script itself from rerunning.
@st.cache(suppress_st_warning=True, show_spinner=False)
def upload_file_B(file):
    """Upload *file* to the B bucket and return its name."""
    deals_blob = B_bucket.blob(file.name)
    deals_blob.upload_from_file(file)
    return file.name


# B upload
with st.expander("Upload B CSV Files"):
    uploaded_file = st.file_uploader(
        "Upload B CSV", type=["csv"])
    if uploaded_file is not None:
        try:
            file_name = upload_file_B(uploaded_file)
        except Exception as e:
            st.error(
                "An error occurred while uploading the file: " + str(e))

        # The validator writes "<name>_error_log.xlsx" next to the upload.
        error_log_name = os.path.splitext(uploaded_file.name)[
            0] + "_error_log.xlsx"

        with st.spinner("Checking for error log..."):
            time.sleep(15)  # give the validator time to produce the log
            error_log = None
            # BUG FIX: the original iterated `deals_bucket_eligcrit`, which
            # is never defined in this snippet; the B bucket is `B_bucket`.
            for blob in B_bucket.list_blobs():
                if error_log_name in blob.name:
                    error_log = blob
                    break
            if error_log:
                error_log_df = pd.read_excel(error_log.download_as_string())
                st.dataframe(error_log_df)
                error_log.delete()
            else:
                st.success("File uploaded successfully!")

Have a look at Streamlit's main concept: the whole script reruns from top to bottom on every widget interaction. This should be considered in the app design.

A workaround is to create a session-state variable that tracks whether the uploaded file has already been verified.

import streamlit as st


# One flag per uploader: has this expander's file already been verified?
# st.session_state persists across reruns, so the flag survives the rerun
# triggered by interacting with another widget.
if 'ok_a' not in st.session_state:
    st.session_state.ok_a = False

# A upload
with st.expander("Upload A CSV Files"):
    uploaded_file = st.file_uploader("Upload A CSV", type=["csv"])

    if uploaded_file is not None:
        # your stuff

        # Run the (slow) check only once; later reruns take the else
        # branch and just re-display the success message.
        if not st.session_state.ok_a:
            with st.spinner("Checking for error log..."):
                is_error = False  # placeholder for the real validation result

                if is_error:
                    st.dataframe(['log A'])
                else:
                    st.success("File A uploaded successfully!")
                    st.session_state.ok_a = True
        else:
            # FIX(review): the original final line was over-indented by one
            # space relative to its block; normalized to the block indent.
            st.success("File A uploaded successfully!")