diff --git a/google/cloud/bigtable/data/_async/client.py b/google/cloud/bigtable/data/_async/client.py index 2864bd023..d75d557e8 100644 --- a/google/cloud/bigtable/data/_async/client.py +++ b/google/cloud/bigtable/data/_async/client.py @@ -520,6 +520,7 @@ async def execute_query( return ExecuteQueryIteratorAsync( self, instance_id, + app_profile_id, request_body, attempt_timeout, operation_timeout, diff --git a/google/cloud/bigtable/data/exceptions.py b/google/cloud/bigtable/data/exceptions.py index 269f98e8a..95cd44f2c 100644 --- a/google/cloud/bigtable/data/exceptions.py +++ b/google/cloud/bigtable/data/exceptions.py @@ -319,4 +319,3 @@ class InvalidExecuteQueryResponse(core_exceptions.GoogleAPICallError): class ParameterTypeInferenceFailed(ValueError): """Exception raised when query parameter types were not provided and cannot be inferred.""" - diff --git a/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py b/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py index 11e914daf..3660c0b0f 100644 --- a/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py +++ b/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py @@ -34,7 +34,7 @@ ) from google.cloud.bigtable.data.exceptions import InvalidExecuteQueryResponse from google.cloud.bigtable.data.execute_query.values import QueryResultRow -from google.cloud.bigtable.data.execute_query.metadata import Metadata +from google.cloud.bigtable.data.execute_query.metadata import Metadata, ProtoMetadata from google.cloud.bigtable.data.execute_query._reader import ( _QueryResultRowReader, _Reader, @@ -46,14 +46,14 @@ class ExecuteQueryIteratorAsync: """ - ExecuteQueryIteratorAsync handles collecting streaming responses from the + ExecuteQueryIteratorAsync handles collecting streaming responses from the ExecuteQuery RPC and parsing them to `QueryResultRow`s. - + ExecuteQueryIteratorAsync implements Asynchronous Iterator interface and can be used with "async for" syntax. It is also a context manager. - + It is **not thread-safe**. It should not be used by multiple asyncio Tasks. - + Args: client (google.cloud.bigtable.data._async.BigtableDataClientAsync): bigtable client instance_id (str): id of the instance on which the query is executed @@ -68,10 +68,12 @@ class ExecuteQueryIteratorAsync: req_metadata (Sequence[Tuple[str, str]]): metadata used while sending the gRPC request retryable_excs (List[type[Exception]]): a list of errors that will be retried if encountered. 
""" + def __init__( self, client: Any, instance_id: str, + app_profile_id: Optional[str], request_body: Dict[str, Any], attempt_timeout: float | None, operation_timeout: float, @@ -79,9 +81,10 @@ def __init__( retryable_excs: List[type[Exception]], ) -> None: self._table_name = None + self._app_profile_id = app_profile_id self._client = client self._instance_id = instance_id - self._byte_cursor = _ByteCursor() + self._byte_cursor = _ByteCursor[ProtoMetadata]() self._reader: _Reader[QueryResultRow] = _QueryResultRowReader(self._byte_cursor) self._result_generator = self._next_impl() self._register_instance_task = None @@ -112,6 +115,10 @@ def __init__( def is_closed(self): return self._is_closed + @property + def app_profile_id(self): + return self._app_profile_id + @property def table_name(self): return self._table_name @@ -179,7 +186,7 @@ def __aiter__(self): async def metadata(self) -> Optional[Metadata]: """ - Returns query metadata from the server or None if the iterator was + Returns query metadata from the server or None if the iterator was explicitly closed. """ if self._is_closed: diff --git a/google/cloud/bigtable/data/execute_query/_byte_cursor.py b/google/cloud/bigtable/data/execute_query/_byte_cursor.py index f17f8bb3e..60f23f541 100644 --- a/google/cloud/bigtable/data/execute_query/_byte_cursor.py +++ b/google/cloud/bigtable/data/execute_query/_byte_cursor.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Optional +from typing import Any, Generic, Optional, TypeVar from google.cloud.bigtable_v2 import ExecuteQueryResponse from google.cloud.bigtable.data.execute_query.metadata import ( @@ -20,8 +20,10 @@ _pb_metadata_to_metadata_types, ) +MT = TypeVar("MT", bound=Metadata) # metadata type -class _ByteCursor: + +class _ByteCursor(Generic[MT]): """ Buffers bytes from `ExecuteQuery` responses until resume_token is received or end-of-stream is reached. :class:`google.cloud.bigtable_v2.types.bigtable.ExecuteQueryResponse` obtained from @@ -35,13 +37,13 @@ class _ByteCursor: """ def __init__(self): - self._metadata: Optional[Metadata] = None + self._metadata: Optional[MT] = None self._buffer = bytearray() self._resume_token = None self._last_response_results_field = None @property - def metadata(self) -> Optional[Metadata]: + def metadata(self) -> Optional[MT]: """ Returns: Metadata or None: Metadata read from the first response of the stream @@ -91,7 +93,8 @@ def consume_metadata(self, response: ExecuteQueryResponse) -> None: raise ValueError("Invalid state - metadata already consumed") if "metadata" in response: - self._metadata = _pb_metadata_to_metadata_types(response.metadata) + metadata: Any = _pb_metadata_to_metadata_types(response.metadata) + self._metadata = metadata else: raise ValueError("Invalid parameter - response without metadata") diff --git a/google/cloud/bigtable/data/execute_query/_parameters_formatting.py b/google/cloud/bigtable/data/execute_query/_parameters_formatting.py index 4da154ef0..c1478d7bb 100644 --- a/google/cloud/bigtable/data/execute_query/_parameters_formatting.py +++ b/google/cloud/bigtable/data/execute_query/_parameters_formatting.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Dict, Any, Optional +from typing import Any, Dict, Optional import datetime from google.api_core.datetime_helpers import DatetimeWithNanoseconds from google.cloud.bigtable.data.exceptions import ParameterTypeInferenceFailed @@ -23,14 +23,14 @@ def _format_execute_query_params( params: Optional[Dict[str, ExecuteQueryValueType]], parameter_types: Optional[Dict[str, SqlType.Type]], -) -> Dict: +) -> Any: """ Takes a dictionary of param_name -> param_value and optionally parameter types. If the parameters types are not provided, this function tries to infer them. Args: params (Optional[Dict[str, ExecuteQueryValueType]]): mapping from parameter names - like they appear in query (without @ at the beginning) to their values. + like they appear in query (without @ at the beginning) to their values. Only values of type ExecuteQueryValueType are permitted. parameter_types (Optional[Dict[str, SqlType.Type]]): mapping of parameter names to their types. @@ -69,7 +69,9 @@ def _format_execute_query_params( return result_values -def _convert_value_to_pb_value_dict(value: ExecuteQueryValueType, param_type: SqlType.Type) -> Dict: +def _convert_value_to_pb_value_dict( + value: ExecuteQueryValueType, param_type: SqlType.Type +) -> Any: """ Takes a value and converts it to a dictionary parsable to a protobuf. diff --git a/google/cloud/bigtable/data/execute_query/_query_result_parsing_utils.py b/google/cloud/bigtable/data/execute_query/_query_result_parsing_utils.py index d37a6bab5..b65dce27b 100644 --- a/google/cloud/bigtable/data/execute_query/_query_result_parsing_utils.py +++ b/google/cloud/bigtable/data/execute_query/_query_result_parsing_utils.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any +from typing import Any, Callable, Dict, Type from google.cloud.bigtable.data.execute_query.values import Struct from google.cloud.bigtable.data.execute_query.metadata import SqlType from google.cloud.bigtable_v2 import Value as PBValue @@ -32,7 +32,7 @@ } -def _parse_array_type(value: PBValue, metadata_type: SqlType.Array) -> list: +def _parse_array_type(value: PBValue, metadata_type: SqlType.Array) -> Any: """ used for parsing an array represented as a protobuf to a python list. """ @@ -46,18 +46,17 @@ def _parse_array_type(value: PBValue, metadata_type: SqlType.Array) -> list: ) -def _parse_map_type(value: PBValue, metadata_type: SqlType.Map) -> dict: +def _parse_map_type(value: PBValue, metadata_type: SqlType.Map) -> Any: """ used for parsing a map represented as a protobuf to a python dict. - + Values of type `Map` are stored in a `Value.array_value` where each entry is another `Value.array_value` with two elements (the key and the value, in that order). - Normally encoded Map values won't have repeated keys, however, the client + Normally encoded Map values won't have repeated keys, however, the client must handle the case in which they do. If the same key appears multiple times, the _last_ value takes precedence. 
""" - try: return dict( @@ -79,7 +78,7 @@ def _parse_map_type(value: PBValue, metadata_type: SqlType.Map) -> dict: def _parse_struct_type(value: PBValue, metadata_type: SqlType.Struct) -> Struct: """ - used for parsing a struct represented as a protobuf to a + used for parsing a struct represented as a protobuf to a google.cloud.bigtable.data.execute_query.Struct """ if len(value.array_value.values) != len(metadata_type.fields): @@ -102,7 +101,7 @@ def _parse_timestamp_type( return DatetimeWithNanoseconds.from_timestamp_pb(value.timestamp_value) -_TYPE_PARSERS = { +_TYPE_PARSERS: Dict[Type[SqlType.Type], Callable[[PBValue, Any], Any]] = { SqlType.Timestamp: _parse_timestamp_type, SqlType.Struct: _parse_struct_type, SqlType.Array: _parse_array_type, @@ -112,7 +111,7 @@ def _parse_timestamp_type( def _parse_pb_value_to_python_value(value: PBValue, metadata_type: SqlType.Type) -> Any: """ - used for converting the value represented as a protobufs to a python object. + used for converting the value represented as a protobufs to a python object. """ value_kind = value.WhichOneof("kind") if not value_kind: diff --git a/google/cloud/bigtable/data/execute_query/_reader.py b/google/cloud/bigtable/data/execute_query/_reader.py index 4788c2a52..9c0259cde 100644 --- a/google/cloud/bigtable/data/execute_query/_reader.py +++ b/google/cloud/bigtable/data/execute_query/_reader.py @@ -19,6 +19,7 @@ Optional, List, Sequence, + cast, ) from abc import ABC, abstractmethod @@ -32,7 +33,7 @@ from google.cloud.bigtable.helpers import batched from google.cloud.bigtable.data.execute_query.values import QueryResultRow -from google.cloud.bigtable.data.execute_query.metadata import Metadata +from google.cloud.bigtable.data.execute_query.metadata import ProtoMetadata T = TypeVar("T") @@ -83,7 +84,7 @@ class _QueryResultRowReader(_Reader[QueryResultRow]): :class:`google.cloud.bigtable.byte_cursor._ByteCursor` passed in the constructor. """ - def __init__(self, byte_cursor: _ByteCursor): + def __init__(self, byte_cursor: _ByteCursor[ProtoMetadata]): """ Constructs new instance of ``_QueryResultRowReader``. 
@@ -97,14 +98,15 @@ def __init__(self, byte_cursor: _ByteCursor):
         self._byte_cursor = byte_cursor
 
     @property
-    def _metadata(self) -> Optional[Metadata]:
+    def _metadata(self) -> Optional[ProtoMetadata]:
         return self._byte_cursor.metadata
 
-    def _construct_query_result_row(
-        self, values: Sequence[PBValue]
-    ) -> List[QueryResultRow]:
+    def _construct_query_result_row(self, values: Sequence[PBValue]) -> QueryResultRow:
         result = QueryResultRow()
-        columns = self._metadata.columns
+        # The logic, not defined by mypy types, ensures that the value of
+        # "metadata" is never null at the time it is retrieved here
+        metadata = cast(ProtoMetadata, self._metadata)
+        columns = metadata.columns
 
         assert len(values) == len(
             columns
@@ -125,7 +127,9 @@ def consume(self, bytes_to_consume: bytes) -> Optional[Iterable[QueryResultRow]]
 
         self._values.extend(self._parse_proto_rows(bytes_to_consume))
 
-        num_columns = len(self._metadata.columns)
+        # The logic, not defined by mypy types, ensures that the value of
+        # "metadata" is never null at the time it is retrieved here
+        num_columns = len(cast(ProtoMetadata, self._metadata).columns)
 
         if len(self._values) < num_columns:
             return None
diff --git a/google/cloud/bigtable/data/execute_query/metadata.py b/google/cloud/bigtable/data/execute_query/metadata.py
index 0cb551c29..19bd136ba 100644
--- a/google/cloud/bigtable/data/execute_query/metadata.py
+++ b/google/cloud/bigtable/data/execute_query/metadata.py
@@ -26,15 +26,16 @@
     List,
     Dict,
     Set,
+    Type,
     Union,
     Tuple,
     Any,
 )
-from google.cloud.bigtable.data.execute_query.values import _NamedList, Struct
+from google.cloud.bigtable.data.execute_query.values import _NamedList
 from google.cloud.bigtable_v2 import ResultSetMetadata
 from google.cloud.bigtable_v2 import Type as PBType
-from google.type import date_pb2
-from google.protobuf import timestamp_pb2
+from google.type import date_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
 from google.api_core.datetime_helpers import DatetimeWithNanoseconds
 
 import datetime
@@ -47,15 +48,15 @@ class SqlType:
     """
 
     class Type:
-        expected_type = None
-        value_pb_dict_field_name = None
-        type_field_name = None
+        expected_type: Optional[type] = None
+        value_pb_dict_field_name: Optional[str] = None
+        type_field_name: Optional[str] = None
 
         @classmethod
         def from_pb_type(cls, pb_type: Optional[PBType] = None):
             return cls()
 
-        def _to_type_pb_dict(self) -> dict:
+        def _to_type_pb_dict(self) -> Any:
             if not self.type_field_name:
                 raise NotImplementedError(
                     "Fill in expected_type and value_pb_dict_field_name"
@@ -63,7 +64,7 @@ def _to_type_pb_dict(self) -> dict:
 
             return {self.type_field_name: {}}
 
-        def _to_value_pb_dict(self, value: Any) -> dict:
+        def _to_value_pb_dict(self, value: Any) -> Any:
             if self.expected_type is None or self.value_pb_dict_field_name is None:
                 raise NotImplementedError(
                     "Fill in expected_type and value_pb_dict_field_name"
@@ -88,21 +89,23 @@ def __str__(self) -> str:
         def __repr__(self) -> str:
             return self.__str__()
 
-    class Struct(_NamedList["SqlType"], Type):
+    class Struct(_NamedList["SqlType.Type"], Type):
         @classmethod
-        def from_pb_type(cls, type_pb: PBType) -> "Metadata.Struct":
-            fields = []
+        def from_pb_type(cls, type_pb: Optional[PBType] = None) -> "SqlType.Struct":
+            if type_pb is None:
+                raise ValueError("missing required argument type_pb")
+            fields: List[Tuple[Optional[str], SqlType.Type]] = []
             for field in type_pb.struct_type.fields:
                 fields.append((field.field_name, _pb_type_to_metadata_type(field.type)))
             return cls(fields)
 
-        def _to_value_pb_dict(self, value: Struct):
+        def _to_value_pb_dict(self, value: Any):
             raise NotImplementedError("Struct is not supported as a query parameter")
 
-        def _to_type_pb_dict(self) -> dict:
+        def _to_type_pb_dict(self) -> Any:
             raise NotImplementedError("Struct is not supported as a query parameter")
 
-        def __eq__(self, other):
+        def __eq__(self, other: object):
             # Cannot use super() here - we'd either have to:
             # - call super() in these base classes, which would in turn call Object.__eq__
             # to compare objects by identity and return a False, or
@@ -110,13 +113,15 @@ def __eq__(self, other):
             # one of the __eq__ methods (a super() in the base class would be required to call the other one), or
             # - call super() in only one of the base classes, but that would be error prone and changing
             # the order of base classes would introduce unexpected behaviour.
-            return SqlType.Type.__eq__(self, other) and _NamedList.__eq__(self, other)
+            return SqlType.Type.__eq__(self, other) and _NamedList.__eq__(
+                self, other
+            )
 
         def __str__(self):
             return _NamedList.__str__(self)
 
     class Array(Type):
-        def __init__(self, element_type: "SqlType"):
+        def __init__(self, element_type: "SqlType.Type"):
             if isinstance(element_type, SqlType.Array):
                 raise ValueError("Arrays of arrays are not supported.")
             self._element_type = element_type
@@ -126,13 +131,15 @@ def element_type(self):
             return self._element_type
 
         @classmethod
-        def from_pb_type(cls, type_pb: PBType) -> "Metadata.Array":
+        def from_pb_type(cls, type_pb: Optional[PBType] = None) -> "SqlType.Array":
+            if type_pb is None:
+                raise ValueError("missing required argument type_pb")
             return cls(_pb_type_to_metadata_type(type_pb.array_type.element_type))
 
-        def _to_value_pb_dict(self, value: list):
+        def _to_value_pb_dict(self, value: Any):
             raise NotImplementedError("Array is not supported as a query parameter")
 
-        def _to_type_pb_dict(self) -> dict:
+        def _to_type_pb_dict(self) -> Any:
             raise NotImplementedError("Array is not supported as a query parameter")
 
         def __eq__(self, other):
@@ -142,7 +149,7 @@ def __str__(self) -> str:
             return f"{self.__class__.__name__}<{str(self.element_type)}>"
 
     class Map(Type):
-        def __init__(self, key_type: Union[str, bytes, int], value_type: "SqlType"):
+        def __init__(self, key_type: "SqlType.Type", value_type: "SqlType.Type"):
             self._key_type = key_type
             self._value_type = value_type
 
@@ -155,16 +162,18 @@ def value_type(self):
             return self._value_type
 
         @classmethod
-        def from_pb_type(cls, type_pb: PBType) -> "Metadata.Map":
+        def from_pb_type(cls, type_pb: Optional[PBType] = None) -> "SqlType.Map":
+            if type_pb is None:
+                raise ValueError("missing required argument type_pb")
             return cls(
                 _pb_type_to_metadata_type(type_pb.map_type.key_type),
                 _pb_type_to_metadata_type(type_pb.map_type.value_type),
             )
 
-        def _to_type_pb_dict(self) -> dict:
+        def _to_type_pb_dict(self) -> Any:
             raise NotImplementedError("Map is not supported as a query parameter")
 
-        def _to_value_pb_dict(self, value: dict):
+        def _to_value_pb_dict(self, value: Any):
             raise NotImplementedError("Map is not supported as a query parameter")
 
         def __eq__(self, other):
@@ -212,13 +221,13 @@ class Timestamp(Type):
             DatetimeWithNanoseconds,
         )
 
-        def _to_value_pb_dict(self, value: Any) -> dict:
+        def _to_value_pb_dict(self, value: Any) -> Any:
             if value is None:
                 return {}
 
             if not isinstance(value, self.expected_types):
                 raise ValueError(
-                    f"Expected one of {', '.join((_type.__name__ for _type in self.expected_type))}"
+                    f"Expected one of {', 
'.join((_type.__name__ for _type in self.expected_types))}" ) if isinstance(value, DatetimeWithNanoseconds): @@ -232,7 +241,7 @@ class Date(Type): type_field_name = "date_type" expected_type = datetime.date - def _to_value_pb_dict(self, value: Any) -> dict: + def _to_value_pb_dict(self, value: Any) -> Any: if value is None: return {} @@ -256,7 +265,7 @@ class Metadata: class ProtoMetadata(Metadata): class Column: - def __init__(self, column_name: Optional[str], column_type: SqlType): + def __init__(self, column_name: Optional[str], column_type: SqlType.Type): self._column_name = column_name self._column_type = column_type @@ -265,23 +274,26 @@ def column_name(self) -> Optional[str]: return self._column_name @property - def column_type(self) -> SqlType: + def column_type(self) -> SqlType.Type: return self._column_type @property def columns(self) -> List[Column]: return self._columns - def __init__(self, columns: Optional[List[Tuple[Optional[str], SqlType]]] = None): - self._columns: List[Tuple[Optional[str], SqlType]] = [] + def __init__( + self, columns: Optional[List[Tuple[Optional[str], SqlType.Type]]] = None + ): + self._columns: List[ProtoMetadata.Column] = [] self._column_indexes: Dict[str, List[int]] = defaultdict(list) self._duplicate_names: Set[str] = set() if columns: for column_name, column_type in columns: - if column_name in self._column_indexes: - self._duplicate_names.add(column_name) - self._column_indexes[column_name].append(len(self._columns)) + if column_name is not None: + if column_name in self._column_indexes: + self._duplicate_names.add(column_name) + self._column_indexes[column_name].append(len(self._columns)) self._columns.append(ProtoMetadata.Column(column_name, column_type)) def __getitem__(self, index_or_name: Union[str, int]) -> Column: @@ -313,7 +325,7 @@ def _pb_metadata_to_metadata_types( metadata_pb: ResultSetMetadata, ) -> Metadata: if "proto_schema" in metadata_pb: - fields = [] + fields: List[Tuple[Optional[str], SqlType.Type]] = [] for column_metadata in metadata_pb.proto_schema.columns: fields.append( (column_metadata.name, _pb_type_to_metadata_type(column_metadata.type)) @@ -322,7 +334,7 @@ def _pb_metadata_to_metadata_types( raise ValueError("Invalid ResultSetMetadata object received.") -_PROTO_TYPE_TO_METADATA_TYPE_FACTORY = { +_PROTO_TYPE_TO_METADATA_TYPE_FACTORY: Dict[str, Type[SqlType.Type]] = { "bytes_type": SqlType.Bytes, "string_type": SqlType.String, "int64_type": SqlType.Int64, @@ -336,7 +348,7 @@ def _pb_metadata_to_metadata_types( } -def _pb_type_to_metadata_type(type_pb: PBType) -> SqlType: +def _pb_type_to_metadata_type(type_pb: PBType) -> SqlType.Type: kind = PBType.pb(type_pb).WhichOneof("kind") if kind in _PROTO_TYPE_TO_METADATA_TYPE_FACTORY: return _PROTO_TYPE_TO_METADATA_TYPE_FACTORY[kind].from_pb_type(type_pb) diff --git a/google/cloud/bigtable/data/execute_query/values.py b/google/cloud/bigtable/data/execute_query/values.py index 416539cdc..ccab5188d 100644 --- a/google/cloud/bigtable/data/execute_query/values.py +++ b/google/cloud/bigtable/data/execute_query/values.py @@ -24,7 +24,7 @@ Tuple, Mapping, ) -from google.type import date_pb2 +from google.type import date_pb2 # type: ignore from google.api_core.datetime_helpers import DatetimeWithNanoseconds T = TypeVar("T") diff --git a/tests/unit/data/execute_query/__init__.py b/tests/unit/data/execute_query/__init__.py new file mode 100644 index 000000000..6d5e14bcf --- /dev/null +++ b/tests/unit/data/execute_query/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2024 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests/unit/data/execute_query/_async/__init__.py b/tests/unit/data/execute_query/_async/__init__.py
new file mode 100644
index 000000000..6d5e14bcf
--- /dev/null
+++ b/tests/unit/data/execute_query/_async/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests/unit/data/_async/_testing.py b/tests/unit/data/execute_query/_async/_testing.py
similarity index 98%
rename from tests/unit/data/_async/_testing.py
rename to tests/unit/data/execute_query/_async/_testing.py
index 7f952711d..5a7acbdd9 100644
--- a/tests/unit/data/_async/_testing.py
+++ b/tests/unit/data/execute_query/_async/_testing.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/unit/data/_async/test_query_iterator.py b/tests/unit/data/execute_query/_async/test_query_iterator.py
similarity index 98%
rename from tests/unit/data/_async/test_query_iterator.py
rename to tests/unit/data/execute_query/_async/test_query_iterator.py
index ea6dde6ab..5c577ed74 100644
--- a/tests/unit/data/_async/test_query_iterator.py
+++ b/tests/unit/data/execute_query/_async/test_query_iterator.py
@@ -109,6 +109,7 @@ async def test_iterator(proto_byte_stream):
     iterator = ExecuteQueryIteratorAsync(
         client=client_mock,
         instance_id="test-instance",
+        app_profile_id="test_profile",
         request_body={},
         attempt_timeout=10,
         operation_timeout=10,
@@ -143,6 +144,7 @@ async def test_iterator_awaits_metadata(proto_byte_stream):
     iterator = ExecuteQueryIteratorAsync(
         client=client_mock,
         instance_id="test-instance",
+        app_profile_id="test_profile",
         request_body={},
         attempt_timeout=10,
         operation_timeout=10,
diff --git a/tests/unit/data/execute_query/_testing.py b/tests/unit/data/execute_query/_testing.py
new file mode 100644
index 000000000..9d24eee34
--- /dev/null
+++ b/tests/unit/data/execute_query/_testing.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# flake8: noqa +from .._testing import TYPE_INT, split_bytes_into_chunks, proto_rows_bytes diff --git a/tests/unit/data/test_byte_cursor.py b/tests/unit/data/execute_query/test_byte_cursor.py similarity index 100% rename from tests/unit/data/test_byte_cursor.py rename to tests/unit/data/execute_query/test_byte_cursor.py diff --git a/tests/unit/data/test_execute_query_parameters_parsing.py b/tests/unit/data/execute_query/test_execute_query_parameters_parsing.py similarity index 100% rename from tests/unit/data/test_execute_query_parameters_parsing.py rename to tests/unit/data/execute_query/test_execute_query_parameters_parsing.py diff --git a/tests/unit/data/test_query_result_parsing_utils.py b/tests/unit/data/execute_query/test_query_result_parsing_utils.py similarity index 100% rename from tests/unit/data/test_query_result_parsing_utils.py rename to tests/unit/data/execute_query/test_query_result_parsing_utils.py diff --git a/tests/unit/data/test_query_result_row_reader.py b/tests/unit/data/execute_query/test_query_result_row_reader.py similarity index 100% rename from tests/unit/data/test_query_result_row_reader.py rename to tests/unit/data/execute_query/test_query_result_row_reader.py
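
---

Reviewer note (not part of the patch): a minimal usage sketch of the behavior this change enables. `execute_query` now forwards `app_profile_id` into the returned `ExecuteQueryIteratorAsync`, so the profile a query was issued with is visible on the iterator via the new property. Project, instance, table, and profile names below are placeholders.

```python
import asyncio

from google.cloud.bigtable.data import BigtableDataClientAsync


async def main() -> None:
    client = BigtableDataClientAsync(project="my-project")
    try:
        iterator = await client.execute_query(
            "SELECT * FROM my_table",
            instance_id="my-instance",
            app_profile_id="my-profile",
        )
        # Before this fix the profile was dropped when the iterator was
        # constructed; now the iterator carries it.
        assert iterator.app_profile_id == "my-profile"
        async for row in iterator:
            print(row)
    finally:
        await client.close()


asyncio.run(main())
```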