diff --git a/docs/async_data_client/async_data_execute_query_iterator.rst b/docs/async_data_client/async_data_execute_query_iterator.rst
new file mode 100644
index 000000000..b911fab7f
--- /dev/null
+++ b/docs/async_data_client/async_data_execute_query_iterator.rst
@@ -0,0 +1,6 @@
+Execute Query Iterator Async
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. autoclass:: google.cloud.bigtable.data.execute_query.ExecuteQueryIteratorAsync
+    :members:
+    :show-inheritance:
diff --git a/docs/async_data_client/async_data_execute_query_metadata.rst b/docs/async_data_client/async_data_execute_query_metadata.rst
new file mode 100644
index 000000000..69add630d
--- /dev/null
+++ b/docs/async_data_client/async_data_execute_query_metadata.rst
@@ -0,0 +1,6 @@
+Execute Query Metadata
+~~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: google.cloud.bigtable.data.execute_query.metadata
+    :members:
+    :show-inheritance:
diff --git a/docs/async_data_client/async_data_execute_query_values.rst b/docs/async_data_client/async_data_execute_query_values.rst
new file mode 100644
index 000000000..6c4fb71c1
--- /dev/null
+++ b/docs/async_data_client/async_data_execute_query_values.rst
@@ -0,0 +1,6 @@
+Execute Query Values
+~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: google.cloud.bigtable.data.execute_query.values
+    :members:
+    :show-inheritance:
diff --git a/docs/async_data_client/async_data_usage.rst b/docs/async_data_client/async_data_usage.rst
index 8843b506b..61d5837fd 100644
--- a/docs/async_data_client/async_data_usage.rst
+++ b/docs/async_data_client/async_data_usage.rst
@@ -13,3 +13,6 @@ Async Data Client
     async_data_mutations
     async_data_read_modify_write_rules
     async_data_exceptions
+    async_data_execute_query_iterator
+    async_data_execute_query_values
+    async_data_execute_query_metadata
diff --git a/google/cloud/bigtable/data/_async/client.py b/google/cloud/bigtable/data/_async/client.py
index 600937df8..82a874918 100644
--- a/google/cloud/bigtable/data/_async/client.py
+++ b/google/cloud/bigtable/data/_async/client.py
@@ -456,38 +456,38 @@ async def execute_query(
             retryable_errors list until operation_timeout is reached.
 
         Args:
-            - query: Query to be run on Bigtable instance. The query can use ``@param``
+            query: Query to be run on Bigtable instance. The query can use ``@param``
                 placeholders to use parameter interpolation on the server. Values for all
                 parameters should be provided in ``parameters``. Types of parameters are
                 inferred but should be provided in ``parameter_types`` if the inference is
                 not possible (i.e. when value can be None, an empty list or an empty dict).
-            - instance_id: The Bigtable instance ID to perform the query on.
+            instance_id: The Bigtable instance ID to perform the query on.
                 instance_id is combined with the client's project to fully specify the instance.
-            - parameters: Dictionary with values for all parameters used in the ``query``.
+            parameters: Dictionary with values for all parameters used in the ``query``.
-            - parameter_types: Dictionary with types of parameters used in the ``query``.
+            parameter_types: Dictionary with types of parameters used in the ``query``.
                 Required to contain entries only for parameters whose type cannot be
                 detected automatically (i.e. the value can be None, an empty list or an
                 empty dict).
-            - app_profile_id: The app profile to associate with requests.
+            app_profile_id: The app profile to associate with requests.
                 https://cloud.google.com/bigtable/docs/app-profiles
-            - operation_timeout: the time budget for the entire operation, in seconds.
+            operation_timeout: the time budget for the entire operation, in seconds.
                 Failed requests will be retried within the budget.
                 Defaults to 600 seconds.
-            - attempt_timeout: the time budget for an individual network request, in seconds.
+            attempt_timeout: the time budget for an individual network request, in seconds.
                 If it takes longer than this time to complete, the request will be cancelled with
                 a DeadlineExceeded exception, and a retry will be attempted.
                 Defaults to 20 seconds. If None, defaults to operation_timeout.
-            - retryable_errors: a list of errors that will be retried if encountered.
+            retryable_errors: a list of errors that will be retried if encountered.
                 Defaults to 4 (DeadlineExceeded), 14 (ServiceUnavailable), and 10 (Aborted)
         Returns:
-            - an asynchronous iterator that yields rows returned by the query
+            ExecuteQueryIteratorAsync: an asynchronous iterator that yields rows returned by the query
         Raises:
-            - DeadlineExceeded: raised after operation timeout
+            google.api_core.exceptions.DeadlineExceeded: raised after operation timeout
                 will be chained with a RetryExceptionGroup containing GoogleAPIError
                 exceptions from any retries that failed
-            - GoogleAPIError: raised if the request encounters an unrecoverable error
+            google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error
         """
         warnings.warn(
             "ExecuteQuery is in preview and may change in the future.",
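Since the docstring above describes the full public surface of execute_query, a short usage sketch may help readers of this change. It is illustrative only: the project, instance, table, and column names ("my-project", "my-instance", "my_table", "age") are hypothetical, not names from this patch.

import asyncio

from google.cloud.bigtable.data import BigtableDataClientAsync
from google.cloud.bigtable.data.execute_query.metadata import SqlType


async def main() -> None:
    # The async data client must be created inside a running event loop.
    async with BigtableDataClientAsync(project="my-project") as client:
        iterator = await client.execute_query(
            "SELECT _key FROM my_table WHERE age >= @min_age",
            instance_id="my-instance",
            parameters={"min_age": 18},
            # parameter_types is only needed when a type cannot be inferred,
            # e.g. when a value may be None, an empty list, or an empty dict.
            parameter_types={"min_age": SqlType.Int64()},
        )
        async for row in iterator:
            print(row)


asyncio.run(main())

Here the int parameter's type is inferable, so parameter_types is shown purely to illustrate the escape hatch the docstring describes.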
diff --git a/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py b/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py
index 3660c0b0f..32081939b 100644
--- a/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py
+++ b/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py
@@ -23,6 +23,7 @@
     Optional,
     Sequence,
     Tuple,
+    TYPE_CHECKING,
 )
 
 from google.api_core import retry as retries
@@ -43,11 +44,14 @@
     ExecuteQueryRequest as ExecuteQueryRequestPB,
 )
 
+if TYPE_CHECKING:
+    from google.cloud.bigtable.data import BigtableDataClientAsync
+
 
 class ExecuteQueryIteratorAsync:
     """
     ExecuteQueryIteratorAsync handles collecting streaming responses from the
-    ExecuteQuery RPC and parsing them to `QueryResultRow`s.
+    ExecuteQuery RPC and parsing them to QueryResultRows.
 
     ExecuteQueryIteratorAsync implements the Asynchronous Iterator interface and can
     be used with "async for" syntax. It is also a context manager.
@@ -55,23 +59,25 @@ class ExecuteQueryIteratorAsync:
     It is **not thread-safe**. It should not be used by multiple asyncio Tasks.
 
     Args:
-        client (google.cloud.bigtable.data._async.BigtableDataClientAsync): bigtable client
-        instance_id (str): id of the instance on which the query is executed
-        request_body (Dict[str, Any]): dict representing the body of the ExecuteQueryRequest
-        attempt_timeout (float | None): the time budget for the entire operation, in seconds.
-            Failed requests will be retried within the budget.
-            Defaults to 600 seconds.
-        operation_timeout (float): the time budget for an individual network request, in seconds.
-            If it takes longer than this time to complete, the request will be cancelled with
-            a DeadlineExceeded exception, and a retry will be attempted.
-            Defaults to the 20 seconds. If None, defaults to operation_timeout.
-        req_metadata (Sequence[Tuple[str, str]]): metadata used while sending the gRPC request
-        retryable_excs (List[type[Exception]]): a list of errors that will be retried if encountered.
+        client: bigtable client
+        instance_id: id of the instance on which the query is executed
+        request_body: dict representing the body of the ExecuteQueryRequest
+        attempt_timeout: the time budget for an individual network request, in seconds.
+            If it takes longer than this time to complete, the request will be cancelled with
+            a DeadlineExceeded exception, and a retry will be attempted.
+            Defaults to 20 seconds. If None, defaults to operation_timeout.
+        operation_timeout: the time budget for the entire operation, in seconds.
+            Failed requests will be retried within the budget.
+            Defaults to 600 seconds.
+        req_metadata: metadata used while sending the gRPC request
+        retryable_excs: a list of errors that will be retried if encountered.
+
+    Raises:
+        RuntimeError: if the instance is not created within an async event loop context.
     """
 
     def __init__(
         self,
-        client: Any,
+        client: BigtableDataClientAsync,
         instance_id: str,
         app_profile_id: Optional[str],
         request_body: Dict[str, Any],
@@ -112,15 +118,18 @@ def __init__(
         ) from e
 
     @property
-    def is_closed(self):
+    def is_closed(self) -> bool:
+        """Returns True if the iterator is closed, False otherwise."""
         return self._is_closed
 
     @property
-    def app_profile_id(self):
+    def app_profile_id(self) -> Optional[str]:
+        """Returns the app_profile_id of the iterator."""
        return self._app_profile_id
 
     @property
-    def table_name(self):
+    def table_name(self) -> Optional[str]:
+        """Returns the table_name of the iterator."""
         return self._table_name
 
     async def _make_request_with_resume_token(self):
@@ -176,7 +185,7 @@ async def _next_impl(self) -> AsyncIterator[QueryResultRow]:
             yield result
         await self.close()
 
-    async def __anext__(self):
+    async def __anext__(self) -> QueryResultRow:
        if self._is_closed:
            raise StopAsyncIteration
        return await self._result_generator.__anext__()

Note: in the previous Args text, the attempt_timeout and operation_timeout descriptions were swapped (operation_timeout cannot "default to operation_timeout"); the added lines above restore the descriptions used by client.execute_query.
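The docstring above also advertises the iterator as a context manager. A minimal sketch of that pattern, assuming the context manager's exit closes the underlying stream (it reuses the hypothetical names from the previous example):

from typing import Optional

from google.cloud.bigtable.data import BigtableDataClientAsync
from google.cloud.bigtable.data.execute_query.values import QueryResultRow


async def first_row(client: BigtableDataClientAsync) -> Optional[QueryResultRow]:
    iterator = await client.execute_query(
        "SELECT _key FROM my_table", instance_id="my-instance"
    )
    # Entering the context manager ensures the stream is closed even if we
    # stop consuming rows early (e.g. via the return inside the loop).
    async with iterator:
        async for row in iterator:
            return row
    return None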
diff --git a/google/cloud/bigtable/data/execute_query/metadata.py b/google/cloud/bigtable/data/execute_query/metadata.py
index 4c08cbad3..0c9cf9697 100644
--- a/google/cloud/bigtable/data/execute_query/metadata.py
+++ b/google/cloud/bigtable/data/execute_query/metadata.py
@@ -90,6 +90,8 @@ def __repr__(self) -> str:
             return self.__str__()
 
     class Struct(_NamedList[Type], Type):
+        """Struct SQL type."""
+
         @classmethod
         def from_pb_type(cls, type_pb: Optional[PBType] = None) -> "SqlType.Struct":
             if type_pb is None:
@@ -120,6 +122,8 @@ def __str__(self):
             return super(_NamedList, self).__str__()
 
     class Array(Type):
+        """Array SQL type."""
+
         def __init__(self, element_type: "SqlType.Type"):
             if isinstance(element_type, SqlType.Array):
                 raise ValueError("Arrays of arrays are not supported.")
@@ -148,6 +152,8 @@ def __str__(self) -> str:
             return f"{self.__class__.__name__}<{str(self.element_type)}>"
 
     class Map(Type):
+        """Map SQL type."""
+
         def __init__(self, key_type: "SqlType.Type", value_type: "SqlType.Type"):
             self._key_type = key_type
             self._value_type = value_type
@@ -189,32 +195,44 @@ def __str__(self) -> str:
         )
 
     class Bytes(Type):
+        """Bytes SQL type."""
+
         expected_type = bytes
         value_pb_dict_field_name = "bytes_value"
         type_field_name = "bytes_type"
 
     class String(Type):
+        """String SQL type."""
+
         expected_type = str
         value_pb_dict_field_name = "string_value"
         type_field_name = "string_type"
 
     class Int64(Type):
+        """Int64 SQL type."""
+
         expected_type = int
         value_pb_dict_field_name = "int_value"
         type_field_name = "int64_type"
 
     class Float64(Type):
+        """Float64 SQL type."""
+
         expected_type = float
         value_pb_dict_field_name = "float_value"
         type_field_name = "float64_type"
 
     class Bool(Type):
+        """Bool SQL type."""
+
         expected_type = bool
         value_pb_dict_field_name = "bool_value"
         type_field_name = "bool_type"
 
     class Timestamp(Type):
         """
+        Timestamp SQL type.
+
         Timestamp supports :class:`DatetimeWithNanoseconds` but Bigtable SQL does
         not currently support nanoseconds precision. We support this for potential
         compatibility in the future. Nanoseconds are currently ignored.
@@ -243,6 +261,8 @@ def _to_value_pb_dict(self, value: Any) -> Dict[str, Any]:
         return {"timestamp_value": ts}
 
     class Date(Type):
+        """Date SQL type."""
+
         type_field_name = "date_type"
         expected_type = datetime.date
 
@@ -265,10 +285,23 @@ def _to_value_pb_dict(self, value: Any) -> Dict[str, Any]:
 
 
 class Metadata:
+    """
+    Base class for metadata returned by the ExecuteQuery operation.
+    """
+
     pass
 
 
 class ProtoMetadata(Metadata):
+    """
+    Metadata class for the ExecuteQuery operation.
+
+    Args:
+        columns (List[Tuple[Optional[str], SqlType.Type]]): List of column
+            metadata tuples. Each tuple contains the column name and the column
+            type.
+    """
+
     class Column:
         def __init__(self, column_name: Optional[str], column_type: SqlType.Type):
             self._column_name = column_name
diff --git a/google/cloud/bigtable/data/execute_query/values.py b/google/cloud/bigtable/data/execute_query/values.py
index 394bef71e..80a0bff6f 100644
--- a/google/cloud/bigtable/data/execute_query/values.py
+++ b/google/cloud/bigtable/data/execute_query/values.py
@@ -112,8 +112,12 @@ def __repr__(self) -> str:
 
 
 class QueryResultRow(_NamedList[ExecuteQueryValueType]):
-    pass
+    """
+    Represents a single row of the query result.
+    """
 
 
 class Struct(_NamedList[ExecuteQueryValueType]):
-    pass
+    """
+    Represents a struct value in the query result.
+    """
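Finally, since QueryResultRow is a _NamedList, a hedged sketch of reading fields back. The access-by-name lookup is assumed from the named-list design above, and the column family and aliases ("cf", "name", "age") are hypothetical:

from google.cloud.bigtable.data import BigtableDataClientAsync


async def print_names(client: BigtableDataClientAsync) -> None:
    iterator = await client.execute_query(
        "SELECT cf['name'] AS name, cf['age'] AS age FROM my_table",
        instance_id="my-instance",
    )
    async for row in iterator:
        # _NamedList values are addressable by position, and by name when
        # the corresponding column has one.
        print(row[0], row["age"])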