fix: The groupby field of the MetricsQuery was not being serialized (#…)
commit 83006ef (1 parent: 00a3e63)
Author: evanh
Date:   Oct 5, 2023
Showing 3 changed files with 49 additions and 8 deletions.
5 changes: 5 additions & 0 deletions CHANGES.rst
@@ -1,6 +1,11 @@
 Changelog and versioning
 ==========================
 
+2.0.4
+-----
+- Fix a bug where the groupby on the MetricsQuery was not being serialized
+
+
 2.0.3
 -----
 
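For context, a minimal sketch of the behavior this changelog entry describes, adapted from the test cases later in this diff and assuming the query.serialize() entry point those tests exercise. Before 2.0.4 the groupby column was silently dropped from the serialized query string:

from datetime import datetime, timedelta

from snuba_sdk.column import Column
from snuba_sdk.metrics_query import MetricsQuery
from snuba_sdk.timeseries import Metric, MetricsScope, Rollup, Timeseries

NOW = datetime(2023, 1, 2, 3, 4, 5)

query = MetricsQuery(
    query=Timeseries(
        metric=Metric(
            "transaction.duration",
            "d:transactions/duration@millisecond",
            123,
            "metrics_sets",
        ),
        aggregate="max",
    ),
    start=NOW,
    end=NOW + timedelta(days=14),
    groupby=[Column("tags[status_code]")],
    rollup=Rollup(interval=3600, granularity=3600),
    scope=MetricsScope(org_ids=[1], project_ids=[11], use_case_id="transactions"),
)

# With the fix, the groupby column lands in the BY clause, e.g.
# "... BY toStartOfInterval(...) AS `time`, tags[status_code] WHERE ..."
print(query.serialize())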
17 changes: 13 additions & 4 deletions snuba_sdk/metrics_query_visitors.py
@@ -7,6 +7,7 @@
 # Import the module due to sphinx autodoc problems
 # https://github.com/agronholm/sphinx-autodoc-typehints#dealing-with-circular-imports
 from snuba_sdk import metrics_query as main
+from snuba_sdk.aliased_expression import AliasedExpression
 from snuba_sdk.column import Column
 from snuba_sdk.conditions import BooleanCondition, Condition, ConditionGroup, Op
 from snuba_sdk.expressions import list_type
@@ -52,7 +53,9 @@ def _visit_filters(self, filters: ConditionGroup | None) -> QVisited:
         raise NotImplementedError
 
     @abstractmethod
-    def _visit_groupby(self, groupby: Sequence[Column] | None) -> QVisited:
+    def _visit_groupby(
+        self, groupby: Sequence[Column | AliasedExpression] | None
+    ) -> QVisited:
         raise NotImplementedError
 
     @abstractmethod
@@ -117,6 +120,8 @@ def _combine(
 
         if timeseries_data["groupby"]:
             groupby_columns.append(timeseries_data["groupby"])
+        if returns["groupby"]:
+            groupby_columns.append(returns["groupby"])
 
         where_clauses.append(timeseries_data["metric_filter"])
         if timeseries_data["filters"]:
@@ -160,7 +165,9 @@ def _visit_filters(self, filters: ConditionGroup | None) -> str:
             return f"{' AND '.join(self.expression_visitor.visit(c) for c in filters)}"
         return ""
 
-    def _visit_groupby(self, groupby: Sequence[Column] | None) -> str:
+    def _visit_groupby(
+        self, groupby: Sequence[Column | AliasedExpression] | None
+    ) -> str:
         if groupby is not None:
             return f"{', '.join(self.expression_visitor.visit(g) for g in groupby)}"
         return ""
@@ -212,9 +219,11 @@ def _visit_filters(self, filters: ConditionGroup | None) -> None:
             raise InvalidMetricsQueryError("filters must be a list of Conditions")
         (c.validate() for c in filters)
 
-    def _visit_groupby(self, groupby: Sequence[Column] | None) -> None:
+    def _visit_groupby(
+        self, groupby: Sequence[Column | AliasedExpression] | None
+    ) -> None:
         if groupby is not None:
-            if not list_type(groupby, (Column,)):
+            if not list_type(groupby, (Column, AliasedExpression)):
                 raise InvalidMetricsQueryError("groupby must be a list of Columns")
             for g in groupby:
                 g.validate()
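The changes above thread AliasedExpression through each visitor. A simplified, self-contained sketch (not the library's actual classes) of the pattern the serializing visitor follows, where aliased entries render as expr AS `alias`:

from __future__ import annotations

from dataclasses import dataclass
from typing import Sequence, Union


@dataclass(frozen=True)
class Column:
    name: str


@dataclass(frozen=True)
class AliasedExpression:
    exp: Column
    alias: str


def visit_groupby(groupby: Sequence[Union[Column, AliasedExpression]] | None) -> str:
    # Render the groupby clause; aliased entries get an AS clause.
    if groupby is None:
        return ""

    def visit(g: Union[Column, AliasedExpression]) -> str:
        if isinstance(g, AliasedExpression):
            return f"{g.exp.name} AS `{g.alias}`"
        return g.name

    return ", ".join(visit(g) for g in groupby)


assert visit_groupby([Column("tags[environment]")]) == "tags[environment]"
assert visit_groupby(
    [AliasedExpression(Column("tags[transaction]"), "transaction")]
) == "tags[transaction] AS `transaction`"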
35 changes: 31 additions & 4 deletions tests/test_metrics_query.py
@@ -5,7 +5,9 @@
 
 import pytest
 
-from snuba_sdk import Column, Condition, Op
+from snuba_sdk.aliased_expression import AliasedExpression
+from snuba_sdk.column import Column
+from snuba_sdk.conditions import Condition, Op
 from snuba_sdk.metrics_query import MetricsQuery
 from snuba_sdk.metrics_query_visitors import InvalidMetricsQueryError
 from snuba_sdk.timeseries import Metric, MetricsScope, Rollup, Timeseries
@@ -61,7 +63,7 @@
                 org_ids=[1], project_ids=[11], use_case_id="transactions"
             ),
         ),
-        "MATCH (metrics_sets) SELECT max(value) AS `aggregate_value` BY toStartOfInterval(timestamp, toIntervalSecond(3600), 'Universal') AS `time` WHERE granularity = 3600 AND metric_id = 123 AND tags[transaction] = 'foo' AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05') ORDER BY time ASC",
+        "MATCH (metrics_sets) SELECT max(value) AS `aggregate_value` BY toStartOfInterval(timestamp, toIntervalSecond(3600), 'Universal') AS `time`, tags[status_code] WHERE granularity = 3600 AND metric_id = 123 AND tags[transaction] = 'foo' AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05') ORDER BY time ASC",
         id="top level filters/group by",
     ),
     pytest.param(
@@ -87,7 +89,7 @@
                 org_ids=[1], project_ids=[11], use_case_id="transactions"
             ),
         ),
-        "MATCH (metrics_sets) SELECT max(value) AS `aggregate_value` BY toStartOfInterval(timestamp, toIntervalSecond(3600), 'Universal') AS `time`, tags[environment] WHERE granularity = 3600 AND metric_id = 123 AND tags[referrer] = 'foo' AND tags[transaction] = 'foo' AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05') ORDER BY time ASC",
+        "MATCH (metrics_sets) SELECT max(value) AS `aggregate_value` BY toStartOfInterval(timestamp, toIntervalSecond(3600), 'Universal') AS `time`, tags[environment], tags[status_code] WHERE granularity = 3600 AND metric_id = 123 AND tags[referrer] = 'foo' AND tags[transaction] = 'foo' AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05') ORDER BY time ASC",
         id="top level filters/group by with low level filters",
     ),
     pytest.param(
@@ -113,9 +115,34 @@
                 org_ids=[1], project_ids=[11], use_case_id="transactions"
             ),
         ),
-        "MATCH (metrics_sets) SELECT max(value) AS `aggregate_value` BY tags[environment] WHERE granularity = 60 AND metric_id = 123 AND tags[referrer] = 'foo' AND tags[transaction] = 'foo' AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05')",
+        "MATCH (metrics_sets) SELECT max(value) AS `aggregate_value` BY tags[environment], tags[status_code] WHERE granularity = 60 AND metric_id = 123 AND tags[referrer] = 'foo' AND tags[transaction] = 'foo' AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05')",
         id="totals query",
     ),
+    pytest.param(
+        MetricsQuery(
+            query=Timeseries(
+                metric=Metric(
+                    "transaction.duration",
+                    "d:transactions/duration@millisecond",
+                    123,
+                    "metrics_sets",
+                ),
+                aggregate="quantiles",
+                aggregate_params=[0.5, 0.99],
+            ),
+            start=NOW,
+            end=NOW + timedelta(days=14),
+            groupby=[AliasedExpression(Column("tags[transaction]"), "transaction")],
+            rollup=Rollup(interval=60, granularity=60),
+            scope=MetricsScope(
+                org_ids=[1],
+                project_ids=[11],
+                use_case_id="transactions",
+            ),
+        ),
+        "MATCH (metrics_sets) SELECT quantiles(0.5, 0.99)(value) AS `aggregate_value` BY toStartOfInterval(timestamp, toIntervalSecond(60), 'Universal') AS `time`, tags[transaction] AS `transaction` WHERE granularity = 60 AND metric_id = 123 AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05') ORDER BY time ASC",
+        id="aliased groupby",
+    ),
 ]


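The validation side of the change can be seen in a short, hedged sketch: a groupby entry that is neither a Column nor an AliasedExpression is rejected with the message from the visitor diff above. This is hypothetical usage; depending on the version, validation may run at construction, via query.validate(), or during serialize():

from datetime import datetime, timedelta

from snuba_sdk.metrics_query import MetricsQuery
from snuba_sdk.metrics_query_visitors import InvalidMetricsQueryError
from snuba_sdk.timeseries import Metric, MetricsScope, Rollup, Timeseries

NOW = datetime(2023, 1, 2, 3, 4, 5)

try:
    query = MetricsQuery(
        query=Timeseries(
            metric=Metric(
                "transaction.duration",
                "d:transactions/duration@millisecond",
                123,
                "metrics_sets",
            ),
            aggregate="max",
        ),
        start=NOW,
        end=NOW + timedelta(days=14),
        groupby=["tags[status_code]"],  # plain strings are not allowed
        rollup=Rollup(interval=60, granularity=60),
        scope=MetricsScope(org_ids=[1], project_ids=[11], use_case_id="transactions"),
    )
    query.validate()
except InvalidMetricsQueryError as exc:
    # The list_type check now admits Column and AliasedExpression
    # and rejects anything else with this message.
    print(exc)  # groupby must be a list of Columns

The new test case itself can be run in isolation with pytest tests/test_metrics_query.py -k aliased, assuming pytest's usual substring matching against the "aliased groupby" id.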
