
drop unused dash app and upgrade sqlalchemy (#236)
maskarb authored Dec 3, 2024
1 parent 9138685 commit ad4f8fc
Showing 17 changed files with 159 additions and 563 deletions.
1 change: 0 additions & 1 deletion Dockerfile
@@ -70,7 +70,6 @@ ENV \

# copy the src files into the workdir
COPY kokudaily kokudaily
-COPY app.py .
COPY job.py .
COPY LICENSE /licenses/MIT.txt

3 changes: 1 addition & 2 deletions Pipfile
Expand Up @@ -6,8 +6,7 @@ name = "pypi"
[packages]
pre-commit = "*"
psycopg2-binary = ">=2.7.0"
sqlalchemy = "==1.3.3"
dash = "*"
sqlalchemy = "*"
prometheus-client = "*"
python-dateutil = ">=2.8"
pytz = "*"
411 changes: 70 additions & 341 deletions Pipfile.lock

Large diffs are not rendered by default.

62 changes: 0 additions & 62 deletions app.py

This file was deleted.

3 changes: 0 additions & 3 deletions kokudaily/charts/__init__.py

This file was deleted.

23 changes: 0 additions & 23 deletions kokudaily/charts/engineering.py

This file was deleted.

6 changes: 0 additions & 6 deletions kokudaily/charts/index.py

This file was deleted.

23 changes: 0 additions & 23 deletions kokudaily/charts/marketing.py

This file was deleted.

15 changes: 0 additions & 15 deletions kokudaily/charts/utils.py

This file was deleted.

5 changes: 3 additions & 2 deletions kokudaily/reports.py
Expand Up @@ -8,6 +8,7 @@
from kokudaily.config import Config
from kokudaily.engine import DB_ENGINE
from pytz import UTC
+from sqlalchemy import text

LOG = logging.getLogger(__name__)
USAGE_REPORT_PARAMS = {
@@ -379,7 +380,7 @@ def _read_sql(filename):
if os.path.exists(data_file) and os.path.isfile(data_file):
with open(data_file) as file:
data = file.read()
-return data
+return text(data)


def run_reports(filter_target=None):
@@ -440,7 +441,7 @@ def run_reports(filter_target=None):
for row in rs:
writer.writerow(row)
data.append(row)
-data_dicts.append(dict(row))
+data_dicts.append(row._asdict())
target_obj = report_data.get(target, {})
target_obj[report_name] = {
"data": data,
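The reports.py changes track the SQLAlchemy 1.4/2.0 API that the unpinned sqlalchemy = "*" requirement now resolves to: Connection.execute() no longer accepts a plain SQL string, so _read_sql() wraps the file contents in text(), and result rows behave like named tuples rather than mappings, so dict(row) becomes row._asdict(). A minimal sketch of the new pattern, with a placeholder engine URL and query rather than this repo's real configuration:

# Sketch of the SQLAlchemy 1.4/2.0-style usage the diff moves to.
# The engine URL and the query are illustrative placeholders.
from sqlalchemy import create_engine, text

engine = create_engine("postgresql+psycopg2://user:pass@localhost/koku")

stmt = text("SELECT schema_name FROM public.api_tenant WHERE schema_name ~ :pattern")

with engine.connect() as conn:
    result = conn.execute(stmt, {"pattern": "^acct"})
    for row in result:
        # Row objects are tuple-like in 1.4+ and no longer act as mappings,
        # so use row._asdict() (or dict(row._mapping)) instead of dict(row).
        record = row._asdict()
        print(record)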
6 changes: 3 additions & 3 deletions kokudaily/sql/cust_cost_model_report_setup.sql
@@ -33,7 +33,7 @@ INSERT
provider_id,
cluster_id
)
-SELECT ''%%1$s'' AS "customer",
+SELECT ''%1$s'' AS "customer",
cm.uuid AS "cost_model_id",
cm.source_type AS "source_type",
cm.created_timestamp AS "created_timestamp",
@@ -44,9 +44,9 @@ SELECT ''%%1$s'' AS "customer",
map.cost_model_id AS "cost_model_map_id",
p.uuid AS "provider_id",
auth.credentials->>''cluster_id'' AS "cluster_id"
-FROM %%1$s.cost_model cm
+FROM %1$s.cost_model cm
-- use left join for provider mapping to keep unused cost models
-LEFT JOIN %%1$s.cost_model_map map
+LEFT JOIN %1$s.cost_model_map map
ON map.cost_model_id = cm.uuid
JOIN public.api_provider p
ON p.uuid = map.provider_uuid
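The switch from ''%%1$s'' to ''%1$s'' follows from the same migration. Previously the raw .sql text was executed with pyformat parameters (%(name)s), so every literal percent sign, including the positional placeholders consumed by PostgreSQL's format(), had to be escaped as %%. With the statement wrapped in text() and parameters bound as :name, the placeholders can be written once; SQLAlchemy's psycopg2 dialect handles the driver-level percent escaping when it compiles the statement. A hedged before/after illustration (fragments abbreviated from this file):

from sqlalchemy import text

# Before: raw string executed with pyformat params, so '%1$s' had to be
# written as '%%1$s' to survive the driver's own %-interpolation.
old_sql = "... FROM %%1$s.cost_model cm ..."

# After: text() with :name binds; PostgreSQL's format() placeholders
# stay as '%1$s' and SQLAlchemy escapes them for psycopg2 as needed.
new_stmt = text("... FROM %1$s.cost_model cm ...")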
4 changes: 2 additions & 2 deletions kokudaily/sql/cust_node_report_setup.sql
@@ -55,12 +55,12 @@ BEGIN
ON c.schema_name = t.schema_name
JOIN public.api_provider p
ON p.customer_id = c.id
-AND p.type = any( %(provider_types)s )
+AND p.type = any( (:provider_types) )
WHERE t.schema_name ~ '^acct'
OR t.schema_name ~ '^org'
ORDER
BY t.schema_name
LOOP
-EXECUTE format(stmt_tmpl, schema_rec.schema_name, %(start_time)s, %(end_time)s);
+EXECUTE format(stmt_tmpl, schema_rec.schema_name, (:start_time), (:end_time));
END LOOP;
END $BODY$ LANGUAGE plpgsql;
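With the pyformat placeholders replaced by named binds, this setup script can be executed as a text() construct with the parameters passed as a plain dict; in the repo that presumably happens via _read_sql() and run_reports(). A hedged sketch, assuming DB_ENGINE (imported in reports.py above) is an ordinary SQLAlchemy Engine and using illustrative parameter values:

from kokudaily.engine import DB_ENGINE  # assumed to be a regular SQLAlchemy Engine
from sqlalchemy import text

with open("kokudaily/sql/cust_node_report_setup.sql") as f:
    setup_stmt = text(f.read())

params = {
    "provider_types": ["OCP"],                  # psycopg2 adapts a Python list to a Postgres array for any(...)
    "start_time": "2024-11-01T00:00:00+00:00",  # illustrative window bounds
    "end_time": "2024-12-01T00:00:00+00:00",
}

with DB_ENGINE.begin() as conn:
    conn.execute(setup_stmt, params)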
24 changes: 12 additions & 12 deletions kokudaily/sql/cust_size_report_setup.sql
@@ -28,7 +28,7 @@ insert
pvc_count,
tag_count
)
-select ''%%1$s'' as "customer",
+select ''%1$s'' as "customer",
rpp.provider_id as "provider_id",
rpp.report_period_start::date as "report_month",
count(distinct rpp.cluster_id) as "cluster_count",
@@ -39,14 +39,14 @@ select ''%%1$s'' as "customer",
max(rpml.tag_count) as "tag_count"
-- count(*) as "raw_lineitem_count"
-- starting with line item as we need the data ingestion counts
-from %%1$s.reporting_ocpusagereportperiod rpp
-join %%1$s.reporting_ocp_clusters c
+from %1$s.reporting_ocpusagereportperiod rpp
+join %1$s.reporting_ocp_clusters c
on rpp.cluster_id = c.cluster_id
-join %%1$s.reporting_ocp_nodes n
+join %1$s.reporting_ocp_nodes n
on c.uuid = n.cluster_id
-join %%1$s.reporting_ocp_projects p
+join %1$s.reporting_ocp_projects p
on c.uuid = p.cluster_id
-join %%1$s.reporting_ocp_pvcs pvc
+join %1$s.reporting_ocp_pvcs pvc
on c.uuid = pvc.cluster_id
-- transformations to get tag counts
join (
@@ -59,23 +59,23 @@ select ''%%1$s'' as "customer",
select distinct
ruls.report_period_id,
key || ''|'' || uv.value as "tag"
-from %%1$s.reporting_ocpusagepodlabel_summary ruls
+from %1$s.reporting_ocpusagepodlabel_summary ruls
left join lateral (select unnest(ruls.values)) as uv(value)
on true
union
select distinct
rsls.report_period_id,
key || ''|'' || sv.value as "tag"
-from %%1$s.reporting_ocpstoragevolumelabel_summary rsls
+from %1$s.reporting_ocpstoragevolumelabel_summary rsls
left join lateral (select unnest(rsls.values)) as sv(value)
on true
) rpta(report_period_id, tag)
group
by rpta.report_period_id
) as rpml(report_period_id, tag_count)
on rpml.report_period_id = rpp.id
-where rpp.report_period_start < ''%%3$s''::timestamptz -- start must be < end bounds as end bounds is start of next month
-and rpp.report_period_start >= ''%%2$s''::timestamptz -- end must be >= start bounds
+where rpp.report_period_start < ''%3$s''::timestamptz -- start must be < end bounds as end bounds is start of next month
+and rpp.report_period_start >= ''%2$s''::timestamptz -- end must be >= start bounds
group
by "customer",
rpp."provider_id",
@@ -91,12 +91,12 @@ begin
on c.schema_name = t.schema_name
join public.api_provider p
on p.customer_id = c.id
and p."type" = any( %(provider_types)s )
and p."type" = any( (:provider_types) )
where t.schema_name ~ '^acct'
or t.schema_name ~ '^org'
order
by t.schema_name
loop
-execute format(stmt_tmpl, schema_rec.schema_name, %(start_time)s, %(end_time)s);
+execute format(stmt_tmpl, schema_rec.schema_name, (:start_time), (:end_time));
end loop;
end $BODY$ language plpgsql;
10 changes: 5 additions & 5 deletions kokudaily/sql/cust_tag_report_setup.sql
@@ -23,21 +23,21 @@ insert
)
with cte_openshift_label_count AS (
select count(distinct key) as key_count
-from %%1$s.reporting_ocptags_values
+from %1$s.reporting_ocptags_values
),
cte_aws_tag_count AS (
select count(distinct key) as key_count
-from %%1$s.reporting_awstags_values
+from %1$s.reporting_awstags_values
),
cte_azure_tag_count AS (
select count(distinct key) as key_count
-from %%1$s.reporting_azuretags_values
+from %1$s.reporting_azuretags_values
),
cte_gcp_label_count AS (
select count(distinct key) as key_count
-from %%1$s.reporting_gcptags_values
+from %1$s.reporting_gcptags_values
)
-select ''%%1$s'' as "customer",
+select ''%1$s'' as "customer",
ocp.key_count as openshift_label_key_count,
aws.key_count as aws_tag_key_count,
azure.key_count as azure_tag_key_count,