diff --git a/CHANGES.rst b/CHANGES.rst
index b3061d42..6a3599b0 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,6 +1,11 @@
 Changelog and versioning
 ==========================
 
+2.0.4
+-----
+- Fix a bug where the groupby on the MetricsQuery was not being serialized
+
+
 2.0.3
 -----
 
diff --git a/snuba_sdk/metrics_query_visitors.py b/snuba_sdk/metrics_query_visitors.py
index 14b684e4..69bed611 100644
--- a/snuba_sdk/metrics_query_visitors.py
+++ b/snuba_sdk/metrics_query_visitors.py
@@ -7,6 +7,7 @@
 # Import the module due to sphinx autodoc problems
 # https://github.com/agronholm/sphinx-autodoc-typehints#dealing-with-circular-imports
 from snuba_sdk import metrics_query as main
+from snuba_sdk.aliased_expression import AliasedExpression
 from snuba_sdk.column import Column
 from snuba_sdk.conditions import BooleanCondition, Condition, ConditionGroup, Op
 from snuba_sdk.expressions import list_type
@@ -52,7 +53,9 @@ def _visit_filters(self, filters: ConditionGroup | None) -> QVisited:
         raise NotImplementedError
 
     @abstractmethod
-    def _visit_groupby(self, groupby: Sequence[Column] | None) -> QVisited:
+    def _visit_groupby(
+        self, groupby: Sequence[Column | AliasedExpression] | None
+    ) -> QVisited:
         raise NotImplementedError
 
     @abstractmethod
@@ -117,6 +120,8 @@ def _combine(
 
         if timeseries_data["groupby"]:
             groupby_columns.append(timeseries_data["groupby"])
+        if returns["groupby"]:
+            groupby_columns.append(returns["groupby"])
 
         where_clauses.append(timeseries_data["metric_filter"])
         if timeseries_data["filters"]:
@@ -160,7 +165,9 @@ def _visit_filters(self, filters: ConditionGroup | None) -> str:
             return f"{' AND '.join(self.expression_visitor.visit(c) for c in filters)}"
         return ""
 
-    def _visit_groupby(self, groupby: Sequence[Column] | None) -> str:
+    def _visit_groupby(
+        self, groupby: Sequence[Column | AliasedExpression] | None
+    ) -> str:
         if groupby is not None:
             return f"{', '.join(self.expression_visitor.visit(g) for g in groupby)}"
         return ""
@@ -212,9 +219,14 @@ def _visit_filters(self, filters: ConditionGroup | None) -> None:
             raise InvalidMetricsQueryError("filters must be a list of Conditions")
-        (c.validate() for c in filters)
+        for c in filters:
+            c.validate()
 
-    def _visit_groupby(self, groupby: Sequence[Column] | None) -> None:
+    def _visit_groupby(
+        self, groupby: Sequence[Column | AliasedExpression] | None
+    ) -> None:
         if groupby is not None:
-            if not list_type(groupby, (Column,)):
-                raise InvalidMetricsQueryError("groupby must be a list of Columns")
+            if not list_type(groupby, (Column, AliasedExpression)):
+                raise InvalidMetricsQueryError(
+                    "groupby must be a list of Columns or AliasedExpressions"
+                )
             for g in groupby:
                 g.validate()
diff --git a/tests/test_metrics_query.py b/tests/test_metrics_query.py
index a72ed8cc..72830362 100644
--- a/tests/test_metrics_query.py
+++ b/tests/test_metrics_query.py
@@ -5,7 +5,9 @@
 
 import pytest
 
-from snuba_sdk import Column, Condition, Op
+from snuba_sdk.aliased_expression import AliasedExpression
+from snuba_sdk.column import Column
+from snuba_sdk.conditions import Condition, Op
 from snuba_sdk.metrics_query import MetricsQuery
 from snuba_sdk.metrics_query_visitors import InvalidMetricsQueryError
 from snuba_sdk.timeseries import Metric, MetricsScope, Rollup, Timeseries
@@ -61,7 +63,7 @@
                 org_ids=[1], project_ids=[11], use_case_id="transactions"
             ),
         ),
-        "MATCH (metrics_sets) SELECT max(value) AS `aggregate_value` BY toStartOfInterval(timestamp, toIntervalSecond(3600), 'Universal') AS `time` WHERE granularity = 3600 AND metric_id = 123 AND tags[transaction] = 'foo' AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05') ORDER BY time ASC",
+        "MATCH (metrics_sets) SELECT max(value) AS `aggregate_value` BY toStartOfInterval(timestamp, toIntervalSecond(3600), 'Universal') AS `time`, tags[status_code] WHERE granularity = 3600 AND metric_id = 123 AND tags[transaction] = 'foo' AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05') ORDER BY time ASC",
         id="top level filters/group by",
     ),
     pytest.param(
@@ -87,7 +89,7 @@
                 org_ids=[1], project_ids=[11], use_case_id="transactions"
             ),
         ),
-        "MATCH (metrics_sets) SELECT max(value) AS `aggregate_value` BY toStartOfInterval(timestamp, toIntervalSecond(3600), 'Universal') AS `time`, tags[environment] WHERE granularity = 3600 AND metric_id = 123 AND tags[referrer] = 'foo' AND tags[transaction] = 'foo' AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05') ORDER BY time ASC",
+        "MATCH (metrics_sets) SELECT max(value) AS `aggregate_value` BY toStartOfInterval(timestamp, toIntervalSecond(3600), 'Universal') AS `time`, tags[environment], tags[status_code] WHERE granularity = 3600 AND metric_id = 123 AND tags[referrer] = 'foo' AND tags[transaction] = 'foo' AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05') ORDER BY time ASC",
         id="top level filters/group by with low level filters",
     ),
     pytest.param(
@@ -113,9 +115,34 @@
                 org_ids=[1], project_ids=[11], use_case_id="transactions"
             ),
         ),
-        "MATCH (metrics_sets) SELECT max(value) AS `aggregate_value` BY tags[environment] WHERE granularity = 60 AND metric_id = 123 AND tags[referrer] = 'foo' AND tags[transaction] = 'foo' AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05')",
+        "MATCH (metrics_sets) SELECT max(value) AS `aggregate_value` BY tags[environment], tags[status_code] WHERE granularity = 60 AND metric_id = 123 AND tags[referrer] = 'foo' AND tags[transaction] = 'foo' AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05')",
         id="totals query",
     ),
+    pytest.param(
+        MetricsQuery(
+            query=Timeseries(
+                metric=Metric(
+                    "transaction.duration",
+                    "d:transactions/duration@millisecond",
+                    123,
+                    "metrics_sets",
+                ),
+                aggregate="quantiles",
+                aggregate_params=[0.5, 0.99],
+            ),
+            start=NOW,
+            end=NOW + timedelta(days=14),
+            groupby=[AliasedExpression(Column("tags[transaction]"), "transaction")],
+            rollup=Rollup(interval=60, granularity=60),
+            scope=MetricsScope(
+                org_ids=[1],
+                project_ids=[11],
+                use_case_id="transactions",
+            ),
+        ),
+        "MATCH (metrics_sets) SELECT quantiles(0.5, 0.99)(value) AS `aggregate_value` BY toStartOfInterval(timestamp, toIntervalSecond(60), 'Universal') AS `time`, tags[transaction] AS `transaction` WHERE granularity = 60 AND metric_id = 123 AND (org_id IN array(1) AND project_id IN array(11) AND use_case_id = 'transactions') AND timestamp >= toDateTime('2023-01-02T03:04:05') AND timestamp < toDateTime('2023-01-16T03:04:05') ORDER BY time ASC",
+        id="aliased groupby",
+    ),
 ]
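
For reviewers, a minimal sketch (not part of the diff) of the behavior this fixes: it builds a `MetricsQuery` whose top-level `groupby` mixes a plain `Column` with an `AliasedExpression`, then prints the serialized query. The constructor arguments mirror the test cases above; the `serialize()` entry point is an assumption based on the SDK's query API, so treat this as illustrative rather than definitive.

```python
from datetime import datetime, timedelta

from snuba_sdk.aliased_expression import AliasedExpression
from snuba_sdk.column import Column
from snuba_sdk.metrics_query import MetricsQuery
from snuba_sdk.timeseries import Metric, MetricsScope, Rollup, Timeseries

NOW = datetime(2023, 1, 2, 3, 4, 5)

query = MetricsQuery(
    query=Timeseries(
        metric=Metric(
            "transaction.duration",
            "d:transactions/duration@millisecond",
            123,
            "metrics_sets",
        ),
        aggregate="max",
    ),
    start=NOW,
    end=NOW + timedelta(days=14),
    # Top-level groupby: before this fix, these columns were dropped from the
    # serialized output; now they are merged into the BY clause alongside the
    # Timeseries-level groupby. AliasedExpression is accepted after the
    # validator change in this PR.
    groupby=[
        Column("tags[status_code]"),
        AliasedExpression(Column("tags[transaction]"), "transaction"),
    ],
    rollup=Rollup(interval=3600, granularity=3600),
    scope=MetricsScope(org_ids=[1], project_ids=[11], use_case_id="transactions"),
)

# serialize() is assumed here; per the expected strings in the tests above,
# the output should resemble:
#   MATCH (metrics_sets) SELECT max(value) AS `aggregate_value`
#   BY ... AS `time`, tags[status_code], tags[transaction] AS `transaction` ...
print(query.serialize())
```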