Hello, I'm building a dashboard with Streamlit where my data lives in a PostgreSQL database on AWS.
My question is: how can I cache the query results instead of re-running the queries every time the app reruns? Here is my current code:
import pandas as pd
import psycopg2
import streamlit as st
from psycopg2 import OperationalError

def create_connection(db_name, db_user, db_password, db_host, db_port):
    # Returns None if the connection cannot be established.
    connection = None
    try:
        connection = psycopg2.connect(
            database=db_name,
            user=db_user,
            password=db_password,
            host=db_host,
            port=db_port,
        )
        print("Connection to PostgreSQL DB successful")
    except OperationalError as e:
        print(f"The error '{e}' occurred")
    return connection
def execute_query(connection, query):
    connection.autocommit = True
    cursor = connection.cursor()
    try:
        cursor.execute(query)
        print("Query executed successfully")
    except OperationalError as e:
        print(f"The error '{e}' occurred")
@st.cache(allow_output_mutation=True, hash_funcs={connection: id})
def get_data(query, connection):
    data = pd.read_sql(query, connection)
    return data
connection = create_connection('db_name',
                               'db_user',
                               'db_password=2121',
                               'db_host',
                               'db_port')
## ERROR ##
AttributeError: 'psycopg2.extensions.connection' object has no attribute '__module__'
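
Based on the error, my guess is that the keys in hash_funcs are supposed to be types (or fully qualified type names), not connection instances, so Streamlit knows which argument type to skip when hashing. Below is a minimal sketch of what I think might work; the credentials, query, and table name (my_table) are placeholders:

import pandas as pd
import psycopg2
import psycopg2.extensions
import streamlit as st

# Placeholder credentials for illustration only.
connection = psycopg2.connect(
    database="db_name",
    user="db_user",
    password="db_password",
    host="db_host",
    port="db_port",
)

# Keying hash_funcs on the connection *type* (not an instance) and mapping it
# to id() should make Streamlit skip hashing the connection argument.
@st.cache(allow_output_mutation=True,
          hash_funcs={psycopg2.extensions.connection: id})
def get_data(query, connection):
    return pd.read_sql(query, connection)

# Placeholder query; on reruns the result should come from the cache
# as long as the query string stays the same.
df = get_data("SELECT * FROM my_table", connection)

Is this the right approach, or should I be using the newer st.cache_data / st.cache_resource APIs instead? As far as I know, those let you prefix an argument name with an underscore (e.g. _connection) to exclude it from hashing, so hash_funcs would not be needed at all.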