Merge pull request #23 from Synthetixio/time-queries
Measure query performance
Tburm authored Oct 25, 2024
2 parents 8fb1d83 + a3b096c commit 85fb897
Showing 4 changed files with 264 additions and 20 deletions.
46 changes: 27 additions & 19 deletions api/internal_api.py
@@ -16,24 +16,27 @@ def get_db_config(streamlit=True):
DB_PASS = st.secrets.database.DB_PASS
DB_HOST = st.secrets.database.DB_HOST
DB_PORT = st.secrets.database.DB_PORT
DB_ENV = st.secrets.database.DB_ENV
else:
load_dotenv()
DB_NAME = os.environ.get("DB_NAME")
DB_USER = os.environ.get("DB_USER")
DB_PASS = os.environ.get("DB_PASS")
DB_HOST = os.environ.get("DB_HOST")
DB_PORT = os.environ.get("DB_PORT")
DB_ENV = os.environ.get("DB_ENV")

return {
"dbname": DB_NAME,
"user": DB_USER,
"password": DB_PASS,
"host": DB_HOST,
"port": DB_PORT,
"env": DB_ENV,
}


def get_connection(db_config):
def _get_connection(db_config):
connection_string = f"postgresql://{db_config['user']}:{db_config['password']}@{db_config['host']}:{db_config['port']}/{db_config['dbname']}"
engine = sqlalchemy.create_engine(connection_string)
conn = engine.connect()
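
The config dict now carries the target environment alongside the connection fields. A minimal sketch of the new flow, assuming DB_ENV is exported in the process environment (the "dev" value below is purely illustrative):

    import os

    from api.internal_api import get_db_config

    os.environ["DB_ENV"] = "dev"  # hypothetical value; normally set via .env or Streamlit secrets

    config = get_db_config(streamlit=False)
    # config now includes the environment alongside the connection settings, e.g.
    # {"dbname": ..., "user": ..., "password": ..., "host": ..., "port": ..., "env": "dev"}
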
@@ -57,8 +60,13 @@ def __init__(
Args:
environment (str): The environment to query data for ('prod' or 'dev')
"""
self.environment = environment
self.db_config = get_db_config(streamlit)

if db_config["env"] is not None:
self.environment = self.db_config["env"]
else:
self.environment = environment

self.engine = self._create_engine()
self.Session = sessionmaker(bind=self.engine)
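
When the config carries an env value, it takes precedence over the environment argument passed to the constructor. A rough sketch of the resolution, assuming the keyword arguments mirror the call added in app.py below (the exact constructor signature is not shown in this diff):

    from api.internal_api import SynthetixAPI, get_db_config

    config = get_db_config(streamlit=False)
    api = SynthetixAPI(environment="prod", db_config=config)
    # If config["env"] is not None (DB_ENV was set), api.environment == config["env"];
    # otherwise api.environment falls back to the "prod" argument.
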

@@ -74,7 +82,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
self.engine.dispose()

@contextmanager
def get_connection(
def _get_connection(
self,
) -> Generator[sqlalchemy.engine.base.Connection, None, None]:
"""Context manager for database connections."""
@@ -94,15 +102,15 @@ def _run_query(self, query: str) -> pd.DataFrame:
Returns:
pandas.DataFrame: The query results.
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)

# queries
def get_volume(
self,
chain: str,
start_date: datetime,
end_date: datetime,
chain: str = "arbitrum_mainnet",
resolution: str = "daily",
) -> pd.DataFrame:
"""
@@ -126,7 +134,7 @@ def get_volume(
WHERE ts >= '{start_date}' and ts <= '{end_date}'
ORDER BY ts
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)

def get_core_stats(
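
get_volume now takes the date range first and defaults chain to "arbitrum_mainnet", so callers that passed the chain as the first positional argument need updating. A usage sketch, assuming an api instance constructed as in the sketch above:

    from datetime import datetime, timedelta

    end_date = datetime.now()
    start_date = end_date - timedelta(days=30)
    df = api.get_volume(start_date, end_date)  # defaults: chain="arbitrum_mainnet", resolution="daily"
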
@@ -158,7 +166,7 @@ def get_core_stats(
GROUP BY ts, chain
ORDER BY ts
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)

def get_core_stats_by_collateral(
@@ -203,7 +211,7 @@ def get_core_stats_by_collateral(
ts >= '{start_date}' and ts <= '{end_date}'
ORDER BY ts
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)

def get_core_account_activity(
@@ -239,7 +247,7 @@ def get_core_account_activity(
GROUP BY 1, 2, 3
ORDER BY 1
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)

def get_core_nof_stakers(
@@ -270,7 +278,7 @@ def get_core_nof_stakers(
WHERE date >= '{start_date}' and date <= '{end_date}'
ORDER BY date
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)

def get_perps_stats(
@@ -304,7 +312,7 @@ def get_perps_stats(
ts >= '{start_date}' and ts <= '{end_date}'
ORDER BY ts
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)

def get_perps_open_interest(
@@ -339,7 +347,7 @@ def get_perps_open_interest(
GROUP BY 1, 2
ORDER BY 2, 1
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)

def get_perps_markets_history(
@@ -358,23 +366,23 @@ def get_perps_markets_history(
Returns:
pandas.DataFrame: Perps markets history with columns:
'ts', 'chain', 'market_symbol', 'size_usd', 'long_oi_pct', 'short_oi_pct'
'ts', 'chain', 'market_symbol', 'total_oi_usd', 'long_oi_pct', 'short_oi_pct'
"""
chain_label = self.SUPPORTED_CHAINS[chain]
query = f"""
SELECT
ts,
'{chain_label}' AS chain,
CONCAT(market_symbol, ' (', '{chain_label}', ')') as market_symbol,
size_usd,
total_oi_usd,
long_oi_pct,
short_oi_pct
FROM {self.environment}_{chain}.fct_perp_market_history_{chain}
WHERE
ts >= '{start_date}' and ts <= '{end_date}'
ORDER BY ts
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)

def get_perps_account_activity(
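
get_perps_markets_history now exposes total_oi_usd in place of size_usd, so downstream consumers of that column need the same rename. A sketch of the adjustment, reusing the api, start_date, and end_date from the sketches above; the keyword arguments and the assumption that the *_oi_pct columns are 0-1 fractions are illustrative, not confirmed by this diff:

    df = api.get_perps_markets_history(
        chain="arbitrum_mainnet", start_date=start_date, end_date=end_date
    )
    df["long_oi_usd"] = df["total_oi_usd"] * df["long_oi_pct"]  # previously size_usd * long_oi_pct
    df["short_oi_usd"] = df["total_oi_usd"] * df["short_oi_pct"]
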
@@ -409,7 +417,7 @@ def get_perps_account_activity(
GROUP BY 1, 2
ORDER BY 1
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)

def get_snx_token_buyback(
@@ -442,7 +450,7 @@ def get_snx_token_buyback(
ts >= '{start_date}' and ts <= '{end_date}'
ORDER BY ts
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)

# V2 queries
@@ -477,7 +485,7 @@ def get_perps_v2_stats(
ts >= '{start_date}' and ts <= '{end_date}'
ORDER BY ts
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)

def get_perps_v2_open_interest(
@@ -510,5 +518,5 @@ def get_perps_v2_open_interest(
ts >= '{start_date}' and ts <= '{end_date}'
ORDER BY ts
"""
with self.get_connection() as conn:
with self._get_connection() as conn:
return pd.read_sql_query(query, conn)
12 changes: 11 additions & 1 deletion dashboards/system_monitor/app.py
@@ -1,6 +1,7 @@
import os
from dotenv import load_dotenv
import streamlit as st
from api.internal_api import SynthetixAPI, get_db_config

load_dotenv()

@@ -22,13 +23,22 @@
st.markdown(hide_footer, unsafe_allow_html=True)


# set the API
@st.cache_resource
def load_api():
return SynthetixAPI(db_config=get_db_config(streamlit=True))


st.session_state.api = load_api()

# pages
core = st.Page("views/core.py", title="Core System")
perps = st.Page("views/perps.py", title="Perps Markets")
performance = st.Page("views/performance.py", title="Query Performance")

# navigation
pages = {
"": [core, perps],
"": [core, perps, performance],
}
nav = st.navigation(pages)
nav.run()
23 changes: 23 additions & 0 deletions dashboards/system_monitor/views/performance.py
@@ -0,0 +1,23 @@
import streamlit as st

from api.internal_api import SynthetixAPI, get_db_config
from dashboards.utils import performance

st.markdown("# Query Performance")

if "df_query" not in st.session_state:
st.session_state.df_query = None


def time_queries():
results = performance.run_benchmarks(st.session_state.api)

# create dataframe
df = performance.create_benchmark_dataframe(results)
st.session_state.df_query = df


st.button("Run queries", on_click=time_queries)

if st.session_state.df_query is not None:
st.dataframe(st.session_state.df_query)
(Diff for the fourth changed file, presumably dashboards/utils/performance.py, did not load.)
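
The page depends on run_benchmarks and create_benchmark_dataframe imported from dashboards.utils.performance. A hypothetical sketch of what such helpers could look like; the set of timed queries, the timing approach, and the column names are assumptions rather than the actual file's contents:

    import time
    from datetime import datetime, timedelta

    import pandas as pd


    def run_benchmarks(api):
        """Time a fixed set of SynthetixAPI queries and return the raw timings."""
        end_date = datetime.now()
        start_date = end_date - timedelta(days=30)
        benchmarks = {
            # only get_volume's new signature is visible in this diff; other queries would follow the same pattern
            "get_volume (default chain, daily)": lambda: api.get_volume(start_date, end_date),
        }
        results = []
        for name, run_query in benchmarks.items():
            started = time.perf_counter()
            run_query()
            results.append({"query": name, "seconds": time.perf_counter() - started})
        return results


    def create_benchmark_dataframe(results):
        """Collect the timing results into a DataFrame for display with st.dataframe."""
        return pd.DataFrame(results)
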
