When the DataFrame reaches about 120,000 records, the page loads very slowly and sometimes freezes entirely. The code is as follows:
import streamlit as st
import numpy as np
import pandas as pd
st.title("这是用streamlit Web框架启动的web应用\n @数字化审计")

@st.cache
def load_contract():
    filepath = r'd:/python/myworks/web/streamlit/ht.csv'
    # utf_8_sig encoding supports the Chinese file path and content
    df = pd.read_csv(filepath, encoding='utf_8_sig')
    return df

df = load_contract()

# Contract category (合同类别)
htType_list = df["合同类别"].unique()
event_htType = st.sidebar.selectbox("选择合同类别", htType_list)

# Company name (公司名称)
dwmc_list = df["公司名称"].unique()
event_dwmcType = st.sidebar.selectbox("选择公司名称", dwmc_list)

part_df = df[(df["合同类别"] == event_htType) & (df["公司名称"] == event_dwmcType)]
st.write(f"根据你的筛选,数据包含{len(part_df)}行")
st.table(part_df)
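For reference, st.table renders every row of part_df as static HTML in one shot, which is presumably where the page struggles at this size. Below is only a minimal sketch of the display step rewritten with st.dataframe (a scrollable widget) plus a row cap; the MAX_ROWS value of 1,000 is my own illustration value, not something from the original app:

# Sketch: render the filtered data with a scrollable widget instead of a static table.
# MAX_ROWS is an arbitrary illustration value, not taken from the original app.
MAX_ROWS = 1000
if len(part_df) > MAX_ROWS:
    st.write(f"只显示前{MAX_ROWS}行")  # "showing only the first {MAX_ROWS} rows"
    st.dataframe(part_df.head(MAX_ROWS))
else:
    st.dataframe(part_df)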