diff --git a/google/cloud/bigtable_admin/__init__.py b/google/cloud/bigtable_admin/__init__.py index d26d79b3c..2884a96ab 100644 --- a/google/cloud/bigtable_admin/__init__.py +++ b/google/cloud/bigtable_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -117,6 +117,12 @@ ) from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import CopyBackupMetadata from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import CopyBackupRequest +from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + CreateAuthorizedViewMetadata, +) +from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + CreateAuthorizedViewRequest, +) from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( CreateBackupMetadata, ) @@ -130,6 +136,12 @@ CreateTableFromSnapshotRequest, ) from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import CreateTableRequest +from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + DataBoostReadLocalWrites, +) +from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + DeleteAuthorizedViewRequest, +) from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( DeleteBackupRequest, ) @@ -146,9 +158,18 @@ from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( GenerateConsistencyTokenResponse, ) +from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + GetAuthorizedViewRequest, +) from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import GetBackupRequest from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import GetSnapshotRequest from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import GetTableRequest +from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + ListAuthorizedViewsRequest, +) +from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + ListAuthorizedViewsResponse, +) from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ListBackupsRequest from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( ListBackupsResponse, @@ -179,12 +200,21 @@ from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( SnapshotTableRequest, ) +from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + StandardReadRemoteWrites, +) from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( UndeleteTableMetadata, ) from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( UndeleteTableRequest, ) +from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + UpdateAuthorizedViewMetadata, +) +from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + UpdateAuthorizedViewRequest, +) from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( UpdateBackupRequest, ) @@ -200,6 +230,7 @@ from google.cloud.bigtable_admin_v2.types.instance import Cluster from google.cloud.bigtable_admin_v2.types.instance import HotTablet from google.cloud.bigtable_admin_v2.types.instance import Instance +from google.cloud.bigtable_admin_v2.types.table import AuthorizedView from google.cloud.bigtable_admin_v2.types.table import Backup from google.cloud.bigtable_admin_v2.types.table import BackupInfo from google.cloud.bigtable_admin_v2.types.table import ChangeStreamConfig @@ -210,6 +241,7 @@ from 
google.cloud.bigtable_admin_v2.types.table import Snapshot from google.cloud.bigtable_admin_v2.types.table import Table from google.cloud.bigtable_admin_v2.types.table import RestoreSourceType +from google.cloud.bigtable_admin_v2.types.types import Type __all__ = ( "BigtableInstanceAdminClient", @@ -246,20 +278,27 @@ "CheckConsistencyResponse", "CopyBackupMetadata", "CopyBackupRequest", + "CreateAuthorizedViewMetadata", + "CreateAuthorizedViewRequest", "CreateBackupMetadata", "CreateBackupRequest", "CreateTableFromSnapshotMetadata", "CreateTableFromSnapshotRequest", "CreateTableRequest", + "DataBoostReadLocalWrites", + "DeleteAuthorizedViewRequest", "DeleteBackupRequest", "DeleteSnapshotRequest", "DeleteTableRequest", "DropRowRangeRequest", "GenerateConsistencyTokenRequest", "GenerateConsistencyTokenResponse", + "GetAuthorizedViewRequest", "GetBackupRequest", "GetSnapshotRequest", "GetTableRequest", + "ListAuthorizedViewsRequest", + "ListAuthorizedViewsResponse", "ListBackupsRequest", "ListBackupsResponse", "ListSnapshotsRequest", @@ -272,8 +311,11 @@ "RestoreTableRequest", "SnapshotTableMetadata", "SnapshotTableRequest", + "StandardReadRemoteWrites", "UndeleteTableMetadata", "UndeleteTableRequest", + "UpdateAuthorizedViewMetadata", + "UpdateAuthorizedViewRequest", "UpdateBackupRequest", "UpdateTableMetadata", "UpdateTableRequest", @@ -285,6 +327,7 @@ "Cluster", "HotTablet", "Instance", + "AuthorizedView", "Backup", "BackupInfo", "ChangeStreamConfig", @@ -295,4 +338,5 @@ "Snapshot", "Table", "RestoreSourceType", + "Type", ) diff --git a/google/cloud/bigtable_admin_v2/__init__.py b/google/cloud/bigtable_admin_v2/__init__.py index 811b956e0..f2aea1667 100644 --- a/google/cloud/bigtable_admin_v2/__init__.py +++ b/google/cloud/bigtable_admin_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
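Note on the new exports above: these __init__ hunks surface the AuthorizedView types (and the table-admin clients gain create/get/list/update/delete_authorized_view methods further down in this diff). The following is a hedged sketch of how the new request types might be used; the SubsetView fields, resource names, and IDs are illustrative assumptions, not copied from this change.

from google.cloud import bigtable_admin_v2

admin = bigtable_admin_v2.BigtableTableAdminClient()

# Restrict the view to rows under a prefix (SubsetView fields are assumed here).
view = bigtable_admin_v2.AuthorizedView(
    subset_view=bigtable_admin_v2.AuthorizedView.SubsetView(
        row_prefixes=[b"user#"],
    ),
)
request = bigtable_admin_v2.CreateAuthorizedViewRequest(
    parent="projects/my-project/instances/my-instance/tables/my-table",
    authorized_view_id="my-view",
    authorized_view=view,
)
operation = admin.create_authorized_view(request=request)  # long-running operation
created = operation.result()

The Get/List/Update/Delete request types registered in gapic_metadata.json below follow the same request-object pattern.
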
@@ -53,20 +53,27 @@ from .types.bigtable_table_admin import CheckConsistencyResponse from .types.bigtable_table_admin import CopyBackupMetadata from .types.bigtable_table_admin import CopyBackupRequest +from .types.bigtable_table_admin import CreateAuthorizedViewMetadata +from .types.bigtable_table_admin import CreateAuthorizedViewRequest from .types.bigtable_table_admin import CreateBackupMetadata from .types.bigtable_table_admin import CreateBackupRequest from .types.bigtable_table_admin import CreateTableFromSnapshotMetadata from .types.bigtable_table_admin import CreateTableFromSnapshotRequest from .types.bigtable_table_admin import CreateTableRequest +from .types.bigtable_table_admin import DataBoostReadLocalWrites +from .types.bigtable_table_admin import DeleteAuthorizedViewRequest from .types.bigtable_table_admin import DeleteBackupRequest from .types.bigtable_table_admin import DeleteSnapshotRequest from .types.bigtable_table_admin import DeleteTableRequest from .types.bigtable_table_admin import DropRowRangeRequest from .types.bigtable_table_admin import GenerateConsistencyTokenRequest from .types.bigtable_table_admin import GenerateConsistencyTokenResponse +from .types.bigtable_table_admin import GetAuthorizedViewRequest from .types.bigtable_table_admin import GetBackupRequest from .types.bigtable_table_admin import GetSnapshotRequest from .types.bigtable_table_admin import GetTableRequest +from .types.bigtable_table_admin import ListAuthorizedViewsRequest +from .types.bigtable_table_admin import ListAuthorizedViewsResponse from .types.bigtable_table_admin import ListBackupsRequest from .types.bigtable_table_admin import ListBackupsResponse from .types.bigtable_table_admin import ListSnapshotsRequest @@ -79,8 +86,11 @@ from .types.bigtable_table_admin import RestoreTableRequest from .types.bigtable_table_admin import SnapshotTableMetadata from .types.bigtable_table_admin import SnapshotTableRequest +from .types.bigtable_table_admin import StandardReadRemoteWrites from .types.bigtable_table_admin import UndeleteTableMetadata from .types.bigtable_table_admin import UndeleteTableRequest +from .types.bigtable_table_admin import UpdateAuthorizedViewMetadata +from .types.bigtable_table_admin import UpdateAuthorizedViewRequest from .types.bigtable_table_admin import UpdateBackupRequest from .types.bigtable_table_admin import UpdateTableMetadata from .types.bigtable_table_admin import UpdateTableRequest @@ -92,6 +102,7 @@ from .types.instance import Cluster from .types.instance import HotTablet from .types.instance import Instance +from .types.table import AuthorizedView from .types.table import Backup from .types.table import BackupInfo from .types.table import ChangeStreamConfig @@ -102,11 +113,13 @@ from .types.table import Snapshot from .types.table import Table from .types.table import RestoreSourceType +from .types.types import Type __all__ = ( "BigtableInstanceAdminAsyncClient", "BigtableTableAdminAsyncClient", "AppProfile", + "AuthorizedView", "AutoscalingLimits", "AutoscalingTargets", "Backup", @@ -121,6 +134,8 @@ "CopyBackupMetadata", "CopyBackupRequest", "CreateAppProfileRequest", + "CreateAuthorizedViewMetadata", + "CreateAuthorizedViewRequest", "CreateBackupMetadata", "CreateBackupRequest", "CreateClusterMetadata", @@ -130,7 +145,9 @@ "CreateTableFromSnapshotMetadata", "CreateTableFromSnapshotRequest", "CreateTableRequest", + "DataBoostReadLocalWrites", "DeleteAppProfileRequest", + "DeleteAuthorizedViewRequest", "DeleteBackupRequest", "DeleteClusterRequest", 
"DeleteInstanceRequest", @@ -142,6 +159,7 @@ "GenerateConsistencyTokenRequest", "GenerateConsistencyTokenResponse", "GetAppProfileRequest", + "GetAuthorizedViewRequest", "GetBackupRequest", "GetClusterRequest", "GetInstanceRequest", @@ -151,6 +169,8 @@ "Instance", "ListAppProfilesRequest", "ListAppProfilesResponse", + "ListAuthorizedViewsRequest", + "ListAuthorizedViewsResponse", "ListBackupsRequest", "ListBackupsResponse", "ListClustersRequest", @@ -176,12 +196,16 @@ "Snapshot", "SnapshotTableMetadata", "SnapshotTableRequest", + "StandardReadRemoteWrites", "StorageType", "Table", + "Type", "UndeleteTableMetadata", "UndeleteTableRequest", "UpdateAppProfileMetadata", "UpdateAppProfileRequest", + "UpdateAuthorizedViewMetadata", + "UpdateAuthorizedViewRequest", "UpdateBackupRequest", "UpdateClusterMetadata", "UpdateInstanceMetadata", diff --git a/google/cloud/bigtable_admin_v2/gapic_metadata.json b/google/cloud/bigtable_admin_v2/gapic_metadata.json index 9b3426470..7cd09c43b 100644 --- a/google/cloud/bigtable_admin_v2/gapic_metadata.json +++ b/google/cloud/bigtable_admin_v2/gapic_metadata.json @@ -354,6 +354,11 @@ "copy_backup" ] }, + "CreateAuthorizedView": { + "methods": [ + "create_authorized_view" + ] + }, "CreateBackup": { "methods": [ "create_backup" @@ -369,6 +374,11 @@ "create_table_from_snapshot" ] }, + "DeleteAuthorizedView": { + "methods": [ + "delete_authorized_view" + ] + }, "DeleteBackup": { "methods": [ "delete_backup" @@ -394,6 +404,11 @@ "generate_consistency_token" ] }, + "GetAuthorizedView": { + "methods": [ + "get_authorized_view" + ] + }, "GetBackup": { "methods": [ "get_backup" @@ -414,6 +429,11 @@ "get_table" ] }, + "ListAuthorizedViews": { + "methods": [ + "list_authorized_views" + ] + }, "ListBackups": { "methods": [ "list_backups" @@ -459,6 +479,11 @@ "undelete_table" ] }, + "UpdateAuthorizedView": { + "methods": [ + "update_authorized_view" + ] + }, "UpdateBackup": { "methods": [ "update_backup" @@ -484,6 +509,11 @@ "copy_backup" ] }, + "CreateAuthorizedView": { + "methods": [ + "create_authorized_view" + ] + }, "CreateBackup": { "methods": [ "create_backup" @@ -499,6 +529,11 @@ "create_table_from_snapshot" ] }, + "DeleteAuthorizedView": { + "methods": [ + "delete_authorized_view" + ] + }, "DeleteBackup": { "methods": [ "delete_backup" @@ -524,6 +559,11 @@ "generate_consistency_token" ] }, + "GetAuthorizedView": { + "methods": [ + "get_authorized_view" + ] + }, "GetBackup": { "methods": [ "get_backup" @@ -544,6 +584,11 @@ "get_table" ] }, + "ListAuthorizedViews": { + "methods": [ + "list_authorized_views" + ] + }, "ListBackups": { "methods": [ "list_backups" @@ -589,6 +634,11 @@ "undelete_table" ] }, + "UpdateAuthorizedView": { + "methods": [ + "update_authorized_view" + ] + }, "UpdateBackup": { "methods": [ "update_backup" @@ -614,6 +664,11 @@ "copy_backup" ] }, + "CreateAuthorizedView": { + "methods": [ + "create_authorized_view" + ] + }, "CreateBackup": { "methods": [ "create_backup" @@ -629,6 +684,11 @@ "create_table_from_snapshot" ] }, + "DeleteAuthorizedView": { + "methods": [ + "delete_authorized_view" + ] + }, "DeleteBackup": { "methods": [ "delete_backup" @@ -654,6 +714,11 @@ "generate_consistency_token" ] }, + "GetAuthorizedView": { + "methods": [ + "get_authorized_view" + ] + }, "GetBackup": { "methods": [ "get_backup" @@ -674,6 +739,11 @@ "get_table" ] }, + "ListAuthorizedViews": { + "methods": [ + "list_authorized_views" + ] + }, "ListBackups": { "methods": [ "list_backups" @@ -719,6 +789,11 @@ "undelete_table" ] }, + "UpdateAuthorizedView": { + 
"methods": [ + "update_authorized_view" + ] + }, "UpdateBackup": { "methods": [ "update_backup" diff --git a/google/cloud/bigtable_admin_v2/services/__init__.py b/google/cloud/bigtable_admin_v2/services/__init__.py index 89a37dc92..8f6cf0682 100644 --- a/google/cloud/bigtable_admin_v2/services/__init__.py +++ b/google/cloud/bigtable_admin_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/__init__.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/__init__.py index 40631d1b4..09a827f87 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/__init__.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py index ab14ddaed..52c537260 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -225,7 +226,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, BigtableInstanceAdminTransport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, + BigtableInstanceAdminTransport, + Callable[..., BigtableInstanceAdminTransport], + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -237,9 +244,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.BigtableInstanceAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,BigtableInstanceAdminTransport,Callable[..., BigtableInstanceAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BigtableInstanceAdminTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -361,8 +370,8 @@ async def create_instance( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, instance_id, instance, clusters]) if request is not None and has_flattened_params: raise ValueError( @@ -370,7 +379,10 @@ async def create_instance( "the individual field arguments should be set." ) - request = bigtable_instance_admin.CreateInstanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.CreateInstanceRequest): + request = bigtable_instance_admin.CreateInstanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -386,11 +398,9 @@ async def create_instance( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_instance, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_instance + ] # Certain fields should be provided within the metadata header; # add these here. @@ -461,8 +471,8 @@ async def get_instance( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -470,7 +480,10 @@ async def get_instance( "the individual field arguments should be set." ) - request = bigtable_instance_admin.GetInstanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.GetInstanceRequest): + request = bigtable_instance_admin.GetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -479,21 +492,9 @@ async def get_instance( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_instance, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_instance + ] # Certain fields should be provided within the metadata header; # add these here. @@ -553,8 +554,8 @@ async def list_instances( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -562,7 +563,10 @@ async def list_instances( "the individual field arguments should be set." 
) - request = bigtable_instance_admin.ListInstancesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.ListInstancesRequest): + request = bigtable_instance_admin.ListInstancesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -571,21 +575,9 @@ async def list_instances( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_instances, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_instances + ] # Certain fields should be provided within the metadata header; # add these here. @@ -644,25 +636,16 @@ async def update_instance( """ # Create or coerce a protobuf request object. - request = instance.Instance(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, instance.Instance): + request = instance.Instance(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_instance, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_instance + ] # Certain fields should be provided within the metadata header; # add these here. @@ -737,8 +720,8 @@ async def partial_update_instance( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([instance, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -746,7 +729,12 @@ async def partial_update_instance( "the individual field arguments should be set." ) - request = bigtable_instance_admin.PartialUpdateInstanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, bigtable_instance_admin.PartialUpdateInstanceRequest + ): + request = bigtable_instance_admin.PartialUpdateInstanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -757,21 +745,9 @@ async def partial_update_instance( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.partial_update_instance, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.partial_update_instance + ] # Certain fields should be provided within the metadata header; # add these here. @@ -835,8 +811,8 @@ async def delete_instance( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -844,7 +820,10 @@ async def delete_instance( "the individual field arguments should be set." ) - request = bigtable_instance_admin.DeleteInstanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.DeleteInstanceRequest): + request = bigtable_instance_admin.DeleteInstanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -853,11 +832,9 @@ async def delete_instance( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_instance, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_instance + ] # Certain fields should be provided within the metadata header; # add these here. @@ -942,8 +919,8 @@ async def create_cluster( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, cluster_id, cluster]) if request is not None and has_flattened_params: raise ValueError( @@ -951,7 +928,10 @@ async def create_cluster( "the individual field arguments should be set." ) - request = bigtable_instance_admin.CreateClusterRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.CreateClusterRequest): + request = bigtable_instance_admin.CreateClusterRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -964,11 +944,9 @@ async def create_cluster( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_cluster, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_cluster + ] # Certain fields should be provided within the metadata header; # add these here. 
@@ -1038,8 +1016,8 @@ async def get_cluster( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1047,7 +1025,10 @@ async def get_cluster( "the individual field arguments should be set." ) - request = bigtable_instance_admin.GetClusterRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.GetClusterRequest): + request = bigtable_instance_admin.GetClusterRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1056,21 +1037,9 @@ async def get_cluster( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_cluster, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_cluster + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1132,8 +1101,8 @@ async def list_clusters( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1141,7 +1110,10 @@ async def list_clusters( "the individual field arguments should be set." ) - request = bigtable_instance_admin.ListClustersRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.ListClustersRequest): + request = bigtable_instance_admin.ListClustersRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1150,21 +1122,9 @@ async def list_clusters( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_clusters, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_clusters + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1223,25 +1183,16 @@ async def update_cluster( """ # Create or coerce a protobuf request object. 
- request = instance.Cluster(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, instance.Cluster): + request = instance.Cluster(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_cluster, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_cluster + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1333,8 +1284,8 @@ async def partial_update_cluster( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([cluster, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1342,7 +1293,10 @@ async def partial_update_cluster( "the individual field arguments should be set." ) - request = bigtable_instance_admin.PartialUpdateClusterRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.PartialUpdateClusterRequest): + request = bigtable_instance_admin.PartialUpdateClusterRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1353,11 +1307,9 @@ async def partial_update_cluster( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.partial_update_cluster, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.partial_update_cluster + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1421,8 +1373,8 @@ async def delete_cluster( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1430,7 +1382,10 @@ async def delete_cluster( "the individual field arguments should be set." ) - request = bigtable_instance_admin.DeleteClusterRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.DeleteClusterRequest): + request = bigtable_instance_admin.DeleteClusterRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -1439,11 +1394,9 @@ async def delete_cluster( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_cluster, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_cluster + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1519,8 +1472,8 @@ async def create_app_profile( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, app_profile_id, app_profile]) if request is not None and has_flattened_params: raise ValueError( @@ -1528,7 +1481,10 @@ async def create_app_profile( "the individual field arguments should be set." ) - request = bigtable_instance_admin.CreateAppProfileRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.CreateAppProfileRequest): + request = bigtable_instance_admin.CreateAppProfileRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1541,11 +1497,9 @@ async def create_app_profile( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_app_profile, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_app_profile + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1606,8 +1560,8 @@ async def get_app_profile( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1615,7 +1569,10 @@ async def get_app_profile( "the individual field arguments should be set." ) - request = bigtable_instance_admin.GetAppProfileRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.GetAppProfileRequest): + request = bigtable_instance_admin.GetAppProfileRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1624,21 +1581,9 @@ async def get_app_profile( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_app_profile, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_app_profile + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1704,8 +1649,8 @@ async def list_app_profiles( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1713,7 +1658,10 @@ async def list_app_profiles( "the individual field arguments should be set." ) - request = bigtable_instance_admin.ListAppProfilesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.ListAppProfilesRequest): + request = bigtable_instance_admin.ListAppProfilesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1722,21 +1670,9 @@ async def list_app_profiles( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_app_profiles, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_app_profiles + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1815,8 +1751,8 @@ async def update_app_profile( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([app_profile, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1824,7 +1760,10 @@ async def update_app_profile( "the individual field arguments should be set." ) - request = bigtable_instance_admin.UpdateAppProfileRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.UpdateAppProfileRequest): + request = bigtable_instance_admin.UpdateAppProfileRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1835,21 +1774,9 @@ async def update_app_profile( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_app_profile, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_app_profile + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1913,8 +1840,8 @@ async def delete_app_profile( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1922,7 +1849,10 @@ async def delete_app_profile( "the individual field arguments should be set." ) - request = bigtable_instance_admin.DeleteAppProfileRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.DeleteAppProfileRequest): + request = bigtable_instance_admin.DeleteAppProfileRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1931,11 +1861,9 @@ async def delete_app_profile( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_app_profile, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_app_profile + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2020,8 +1948,8 @@ async def get_iam_policy( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2029,32 +1957,18 @@ async def get_iam_policy( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. if isinstance(request, dict): request = iam_policy_pb2.GetIamPolicyRequest(**request) elif not request: - request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, - ) + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_iam_policy + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2141,8 +2055,8 @@ async def set_iam_policy( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2150,22 +2064,18 @@ async def set_iam_policy( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. if isinstance(request, dict): request = iam_policy_pb2.SetIamPolicyRequest(**request) elif not request: - request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, - ) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.set_iam_policy + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2232,8 +2142,8 @@ async def test_iam_permissions( Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource, permissions]) if request is not None and has_flattened_params: raise ValueError( @@ -2241,33 +2151,20 @@ async def test_iam_permissions( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. if isinstance(request, dict): request = iam_policy_pb2.TestIamPermissionsRequest(**request) elif not request: request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, - permissions=permissions, + resource=resource, permissions=permissions ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2331,8 +2228,8 @@ async def list_hot_tablets( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2340,7 +2237,10 @@ async def list_hot_tablets( "the individual field arguments should be set." ) - request = bigtable_instance_admin.ListHotTabletsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_instance_admin.ListHotTabletsRequest): + request = bigtable_instance_admin.ListHotTabletsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2349,21 +2249,9 @@ async def list_hot_tablets( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_hot_tablets, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_hot_tablets + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py index 4c2c2998e..550bcb1e7 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
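The other change running through async_client.py above is mechanical: each per-call gapic_v1.method_async.wrap_method(...) block is replaced by a lookup in the transport's precomputed _wrapped_methods mapping, so retry/timeout defaults are applied once at transport construction instead of on every call. A minimal sketch of that pattern, reusing the defaults from the removed get_instance block (the mapping shown here is illustrative, not the generated transport code):

from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry_async as retries

async def get_instance(request, **kwargs):
    ...  # stand-in for the raw transport RPC

# Built once by the transport; callers then do
# rpc = transport._wrapped_methods[transport.get_instance].
_wrapped_methods = {
    get_instance: gapic_v1.method_async.wrap_method(
        get_instance,
        default_retry=retries.AsyncRetry(
            initial=1.0,
            maximum=60.0,
            multiplier=2,
            predicate=retries.if_exception_type(
                core_exceptions.DeadlineExceeded,
                core_exceptions.ServiceUnavailable,
            ),
            deadline=60.0,
        ),
        default_timeout=60.0,
        client_info=gapic_v1.client_info.ClientInfo(),
    ),
}
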
@@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -654,7 +655,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BigtableInstanceAdminTransport]] = None, + transport: Optional[ + Union[ + str, + BigtableInstanceAdminTransport, + Callable[..., BigtableInstanceAdminTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -666,9 +673,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, BigtableInstanceAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,BigtableInstanceAdminTransport,Callable[..., BigtableInstanceAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BigtableInstanceAdminTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -777,8 +786,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[BigtableInstanceAdminTransport], + Callable[..., BigtableInstanceAdminTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BigtableInstanceAdminTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -868,8 +885,8 @@ def create_instance( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, instance_id, instance, clusters]) if request is not None and has_flattened_params: raise ValueError( @@ -877,10 +894,8 @@ def create_instance( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.CreateInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.CreateInstanceRequest): request = bigtable_instance_admin.CreateInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -967,8 +982,8 @@ def get_instance( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -976,10 +991,8 @@ def get_instance( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.GetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.GetInstanceRequest): request = bigtable_instance_admin.GetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1049,8 +1062,8 @@ def list_instances( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1058,10 +1071,8 @@ def list_instances( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.ListInstancesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.ListInstancesRequest): request = bigtable_instance_admin.ListInstancesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1130,10 +1141,8 @@ def update_instance( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a instance.Instance. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, instance.Instance): request = instance.Instance(request) @@ -1214,8 +1223,8 @@ def partial_update_instance( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([instance, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1223,10 +1232,8 @@ def partial_update_instance( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.PartialUpdateInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, bigtable_instance_admin.PartialUpdateInstanceRequest ): @@ -1304,8 +1311,8 @@ def delete_instance( sent along with the request as metadata. """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1313,10 +1320,8 @@ def delete_instance( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.DeleteInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.DeleteInstanceRequest): request = bigtable_instance_admin.DeleteInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1411,8 +1416,8 @@ def create_cluster( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, cluster_id, cluster]) if request is not None and has_flattened_params: raise ValueError( @@ -1420,10 +1425,8 @@ def create_cluster( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.CreateClusterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.CreateClusterRequest): request = bigtable_instance_admin.CreateClusterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1507,8 +1510,8 @@ def get_cluster( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1516,10 +1519,8 @@ def get_cluster( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.GetClusterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.GetClusterRequest): request = bigtable_instance_admin.GetClusterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1591,8 +1592,8 @@ def list_clusters( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1600,10 +1601,8 @@ def list_clusters( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.ListClustersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.ListClustersRequest): request = bigtable_instance_admin.ListClustersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1672,10 +1671,8 @@ def update_cluster( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a instance.Cluster. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, instance.Cluster): request = instance.Cluster(request) @@ -1773,8 +1770,8 @@ def partial_update_cluster( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([cluster, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1782,10 +1779,8 @@ def partial_update_cluster( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.PartialUpdateClusterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.PartialUpdateClusterRequest): request = bigtable_instance_admin.PartialUpdateClusterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1861,8 +1856,8 @@ def delete_cluster( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1870,10 +1865,8 @@ def delete_cluster( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.DeleteClusterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, bigtable_instance_admin.DeleteClusterRequest): request = bigtable_instance_admin.DeleteClusterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1959,8 +1952,8 @@ def create_app_profile( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, app_profile_id, app_profile]) if request is not None and has_flattened_params: raise ValueError( @@ -1968,10 +1961,8 @@ def create_app_profile( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.CreateAppProfileRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.CreateAppProfileRequest): request = bigtable_instance_admin.CreateAppProfileRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2046,8 +2037,8 @@ def get_app_profile( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2055,10 +2046,8 @@ def get_app_profile( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.GetAppProfileRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.GetAppProfileRequest): request = bigtable_instance_admin.GetAppProfileRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2134,8 +2123,8 @@ def list_app_profiles( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2143,10 +2132,8 @@ def list_app_profiles( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.ListAppProfilesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, bigtable_instance_admin.ListAppProfilesRequest): request = bigtable_instance_admin.ListAppProfilesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2235,8 +2222,8 @@ def update_app_profile( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([app_profile, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2244,10 +2231,8 @@ def update_app_profile( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.UpdateAppProfileRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.UpdateAppProfileRequest): request = bigtable_instance_admin.UpdateAppProfileRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2323,8 +2308,8 @@ def delete_app_profile( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2332,10 +2317,8 @@ def delete_app_profile( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.DeleteAppProfileRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.DeleteAppProfileRequest): request = bigtable_instance_admin.DeleteAppProfileRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2430,8 +2413,8 @@ def get_iam_policy( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2440,8 +2423,8 @@ def get_iam_policy( ) if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy_pb2.GetIamPolicyRequest(**request) elif not request: # Null request, just make one. @@ -2538,8 +2521,8 @@ def set_iam_policy( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2548,8 +2531,8 @@ def set_iam_policy( ) if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy_pb2.SetIamPolicyRequest(**request) elif not request: # Null request, just make one. @@ -2626,8 +2609,8 @@ def test_iam_permissions( Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource, permissions]) if request is not None and has_flattened_params: raise ValueError( @@ -2636,8 +2619,8 @@ def test_iam_permissions( ) if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy_pb2.TestIamPermissionsRequest(**request) elif not request: # Null request, just make one. @@ -2713,8 +2696,8 @@ def list_hot_tablets( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2722,10 +2705,8 @@ def list_hot_tablets( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_instance_admin.ListHotTabletsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_instance_admin.ListHotTabletsRequest): request = bigtable_instance_admin.ListHotTabletsRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/pagers.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/pagers.py index 0d646a96e..f76da7622 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/pagers.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
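The reworded comments above all describe the same calling convention for the instance admin client: a fully built request (proto-plus message or dict) and the flattened field arguments are mutually exclusive, and a request that is already the right proto-plus type is used as-is rather than copied. A minimal sketch of that behaviour, assuming default credentials and a hypothetical project/instance pair:

from google.cloud import bigtable_admin_v2
from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin

client = bigtable_admin_v2.BigtableInstanceAdminClient()
name = "projects/my-project/instances/my-instance"  # hypothetical resource name

# Flattened-argument form.
instance = client.get_instance(name=name)

# Request-object form; a proto-plus request is passed through unchanged,
# while a dict is coerced into a GetInstanceRequest.
request = bigtable_instance_admin.GetInstanceRequest(name=name)
instance = client.get_instance(request=request)

# Mixing both forms trips the "quick check" above and raises ValueError.
try:
    client.get_instance(request=request, name=name)
except ValueError:
    pass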
diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/__init__.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/__init__.py index 62da28c88..45cf579fb 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/__init__.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py index aeb07556c..fc346c9bb 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py index c47db6ba5..49a1b9e11 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -59,7 +59,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -79,14 +79,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -96,11 +99,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -127,7 +130,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. @@ -168,7 +171,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py index cbd77b381..b85a696d9 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -74,7 +76,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -104,7 +105,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -124,15 +125,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -142,11 +146,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -173,7 +177,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -213,7 +217,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -888,6 +894,246 @@ def list_hot_tablets( ) return self._stubs["list_hot_tablets"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_instance: gapic_v1.method_async.wrap_method( + self.create_instance, + default_timeout=300.0, + client_info=client_info, + ), + self.get_instance: gapic_v1.method_async.wrap_method( + self.get_instance, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_instances: gapic_v1.method_async.wrap_method( + self.list_instances, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_instance: gapic_v1.method_async.wrap_method( + self.update_instance, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.partial_update_instance: gapic_v1.method_async.wrap_method( + self.partial_update_instance, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method_async.wrap_method( + self.delete_instance, + default_timeout=60.0, + client_info=client_info, + ), + self.create_cluster: gapic_v1.method_async.wrap_method( + self.create_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.get_cluster: gapic_v1.method_async.wrap_method( + self.get_cluster, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_clusters: gapic_v1.method_async.wrap_method( + self.list_clusters, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_cluster: gapic_v1.method_async.wrap_method( + self.update_cluster, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + 
self.partial_update_cluster: gapic_v1.method_async.wrap_method( + self.partial_update_cluster, + default_timeout=None, + client_info=client_info, + ), + self.delete_cluster: gapic_v1.method_async.wrap_method( + self.delete_cluster, + default_timeout=60.0, + client_info=client_info, + ), + self.create_app_profile: gapic_v1.method_async.wrap_method( + self.create_app_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.get_app_profile: gapic_v1.method_async.wrap_method( + self.get_app_profile, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_app_profiles: gapic_v1.method_async.wrap_method( + self.list_app_profiles, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_app_profile: gapic_v1.method_async.wrap_method( + self.update_app_profile, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_app_profile: gapic_v1.method_async.wrap_method( + self.delete_app_profile, + default_timeout=60.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method_async.wrap_method( + self.get_iam_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method_async.wrap_method( + self.set_iam_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method_async.wrap_method( + self.test_iam_permissions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_hot_tablets: gapic_v1.method_async.wrap_method( + self.list_hot_tablets, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/__init__.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/__init__.py index 544649e90..7fdf89eb6 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/__init__.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
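The transport changes above widen ``channel`` from a bare channel object to either a channel instance or a callable that builds one; when a callable is given, the transport invokes it with the same arguments it would otherwise pass to ``create_channel`` (host, credentials, scopes, options, and so on), and the async transport additionally gains its own ``_prep_wrapped_messages`` with AsyncRetry defaults. A rough sketch of plugging a custom channel factory into the sync transport (a pass-through factory here, purely illustrative):

from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
    BigtableInstanceAdminClient,
)
from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.transports import (
    BigtableInstanceAdminGrpcTransport,
)


def channel_factory(host, **kwargs):
    # Called by the transport in place of create_channel; interceptors or
    # extra channel options could be added here before the channel is built.
    return BigtableInstanceAdminGrpcTransport.create_channel(host, **kwargs)


transport = BigtableInstanceAdminGrpcTransport(channel=channel_factory)
client = BigtableInstanceAdminClient(transport=transport)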
diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py index 124b3ef09..2747e4037 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -74,6 +75,10 @@ class BigtableTableAdminAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = BigtableTableAdminClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = BigtableTableAdminClient._DEFAULT_UNIVERSE + authorized_view_path = staticmethod(BigtableTableAdminClient.authorized_view_path) + parse_authorized_view_path = staticmethod( + BigtableTableAdminClient.parse_authorized_view_path + ) backup_path = staticmethod(BigtableTableAdminClient.backup_path) parse_backup_path = staticmethod(BigtableTableAdminClient.parse_backup_path) cluster_path = staticmethod(BigtableTableAdminClient.cluster_path) @@ -221,7 +226,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, BigtableTableAdminTransport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, + BigtableTableAdminTransport, + Callable[..., BigtableTableAdminTransport], + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -233,9 +244,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.BigtableTableAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,BigtableTableAdminTransport,Callable[..., BigtableTableAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BigtableTableAdminTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -335,8 +348,8 @@ async def create_table( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, table_id, table]) if request is not None and has_flattened_params: raise ValueError( @@ -344,7 +357,10 @@ async def create_table( "the individual field arguments should be set." ) - request = bigtable_table_admin.CreateTableRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, bigtable_table_admin.CreateTableRequest): + request = bigtable_table_admin.CreateTableRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -357,11 +373,9 @@ async def create_table( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_table, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_table + ] # Certain fields should be provided within the metadata header; # add these here. @@ -459,8 +473,8 @@ async def create_table_from_snapshot( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, table_id, source_snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -468,7 +482,10 @@ async def create_table_from_snapshot( "the individual field arguments should be set." ) - request = bigtable_table_admin.CreateTableFromSnapshotRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.CreateTableFromSnapshotRequest): + request = bigtable_table_admin.CreateTableFromSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -481,11 +498,9 @@ async def create_table_from_snapshot( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_table_from_snapshot, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_table_from_snapshot + ] # Certain fields should be provided within the metadata header; # add these here. @@ -554,8 +569,8 @@ async def list_tables( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -563,7 +578,10 @@ async def list_tables( "the individual field arguments should be set." ) - request = bigtable_table_admin.ListTablesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.ListTablesRequest): + request = bigtable_table_admin.ListTablesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -572,21 +590,9 @@ async def list_tables( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_tables, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_tables + ] # Certain fields should be provided within the metadata header; # add these here. @@ -655,8 +661,8 @@ async def get_table( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -664,7 +670,10 @@ async def get_table( "the individual field arguments should be set." ) - request = bigtable_table_admin.GetTableRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.GetTableRequest): + request = bigtable_table_admin.GetTableRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -673,21 +682,9 @@ async def get_table( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_table, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_table + ] # Certain fields should be provided within the metadata header; # add these here. @@ -767,8 +764,8 @@ async def update_table( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -776,7 +773,10 @@ async def update_table( "the individual field arguments should be set." ) - request = bigtable_table_admin.UpdateTableRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.UpdateTableRequest): + request = bigtable_table_admin.UpdateTableRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -787,11 +787,9 @@ async def update_table( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_table, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_table + ] # Certain fields should be provided within the metadata header; # add these here. @@ -854,8 +852,8 @@ async def delete_table( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -863,7 +861,10 @@ async def delete_table( "the individual field arguments should be set." ) - request = bigtable_table_admin.DeleteTableRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.DeleteTableRequest): + request = bigtable_table_admin.DeleteTableRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -872,11 +873,9 @@ async def delete_table( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_table, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_table + ] # Certain fields should be provided within the metadata header; # add these here. @@ -937,8 +936,8 @@ async def undelete_table( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -946,7 +945,10 @@ async def undelete_table( "the individual field arguments should be set." ) - request = bigtable_table_admin.UndeleteTableRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.UndeleteTableRequest): + request = bigtable_table_admin.UndeleteTableRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -955,11 +957,9 @@ async def undelete_table( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.undelete_table, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.undelete_table + ] # Certain fields should be provided within the metadata header; # add these here. @@ -989,6 +989,494 @@ async def undelete_table( # Done; return the response. 
return response + async def create_authorized_view( + self, + request: Optional[ + Union[bigtable_table_admin.CreateAuthorizedViewRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + authorized_view: Optional[table.AuthorizedView] = None, + authorized_view_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new AuthorizedView in a table. + + Args: + request (Optional[Union[google.cloud.bigtable_admin_v2.types.CreateAuthorizedViewRequest, dict]]): + The request object. The request for + [CreateAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.CreateAuthorizedView] + parent (:class:`str`): + Required. This is the name of the table the + AuthorizedView belongs to. Values are of the form + ``projects/{project}/instances/{instance}/tables/{table}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + authorized_view (:class:`google.cloud.bigtable_admin_v2.types.AuthorizedView`): + Required. The AuthorizedView to + create. + + This corresponds to the ``authorized_view`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + authorized_view_id (:class:`str`): + Required. The id of the AuthorizedView to create. This + AuthorizedView must not already exist. The + ``authorized_view_id`` appended to ``parent`` forms the + full AuthorizedView name of the form + ``projects/{project}/instances/{instance}/tables/{table}/authorizedView/{authorized_view}``. + + This corresponds to the ``authorized_view_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.bigtable_admin_v2.types.AuthorizedView` AuthorizedViews represent subsets of a particular Cloud Bigtable table. Users + can configure access to each Authorized View + independently from the table and use the existing + Data APIs to access the subset of data. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, authorized_view, authorized_view_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.CreateAuthorizedViewRequest): + request = bigtable_table_admin.CreateAuthorizedViewRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if authorized_view is not None: + request.authorized_view = authorized_view + if authorized_view_id is not None: + request.authorized_view_id = authorized_view_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_authorized_view + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + table.AuthorizedView, + metadata_type=bigtable_table_admin.CreateAuthorizedViewMetadata, + ) + + # Done; return the response. + return response + + async def list_authorized_views( + self, + request: Optional[ + Union[bigtable_table_admin.ListAuthorizedViewsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAuthorizedViewsAsyncPager: + r"""Lists all AuthorizedViews from a specific table. + + Args: + request (Optional[Union[google.cloud.bigtable_admin_v2.types.ListAuthorizedViewsRequest, dict]]): + The request object. Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews][google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews] + parent (:class:`str`): + Required. The unique name of the table for which + AuthorizedViews should be listed. Values are of the form + ``projects/{project}/instances/{instance}/tables/{table}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigtable_admin_v2.services.bigtable_table_admin.pagers.ListAuthorizedViewsAsyncPager: + Response message for + [google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews][google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, bigtable_table_admin.ListAuthorizedViewsRequest): + request = bigtable_table_admin.ListAuthorizedViewsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_authorized_views + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAuthorizedViewsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_authorized_view( + self, + request: Optional[ + Union[bigtable_table_admin.GetAuthorizedViewRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> table.AuthorizedView: + r"""Gets information from a specified AuthorizedView. + + Args: + request (Optional[Union[google.cloud.bigtable_admin_v2.types.GetAuthorizedViewRequest, dict]]): + The request object. Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.GetAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.GetAuthorizedView] + name (:class:`str`): + Required. The unique name of the requested + AuthorizedView. Values are of the form + ``projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigtable_admin_v2.types.AuthorizedView: + AuthorizedViews represent subsets of + a particular Cloud Bigtable table. Users + can configure access to each Authorized + View independently from the table and + use the existing Data APIs to access the + subset of data. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.GetAuthorizedViewRequest): + request = bigtable_table_admin.GetAuthorizedViewRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_authorized_view + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_authorized_view( + self, + request: Optional[ + Union[bigtable_table_admin.UpdateAuthorizedViewRequest, dict] + ] = None, + *, + authorized_view: Optional[table.AuthorizedView] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an AuthorizedView in a table. + + Args: + request (Optional[Union[google.cloud.bigtable_admin_v2.types.UpdateAuthorizedViewRequest, dict]]): + The request object. The request for + [UpdateAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.UpdateAuthorizedView]. + authorized_view (:class:`google.cloud.bigtable_admin_v2.types.AuthorizedView`): + Required. The AuthorizedView to update. The ``name`` in + ``authorized_view`` is used to identify the + AuthorizedView. AuthorizedView name must in this format + projects//instances//tables//authorizedViews/ + + This corresponds to the ``authorized_view`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to update. A mask + specifying which fields in the AuthorizedView resource + should be updated. This mask is relative to the + AuthorizedView resource, not to the request message. A + field will be overwritten if it is in the mask. If + empty, all fields set in the request will be + overwritten. A special value ``*`` means to overwrite + all fields (including fields not set in the request). + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.bigtable_admin_v2.types.AuthorizedView` AuthorizedViews represent subsets of a particular Cloud Bigtable table. Users + can configure access to each Authorized View + independently from the table and use the existing + Data APIs to access the subset of data. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([authorized_view, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.UpdateAuthorizedViewRequest): + request = bigtable_table_admin.UpdateAuthorizedViewRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if authorized_view is not None: + request.authorized_view = authorized_view + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_authorized_view + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("authorized_view.name", request.authorized_view.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + table.AuthorizedView, + metadata_type=bigtable_table_admin.UpdateAuthorizedViewMetadata, + ) + + # Done; return the response. + return response + + async def delete_authorized_view( + self, + request: Optional[ + Union[bigtable_table_admin.DeleteAuthorizedViewRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Permanently deletes a specified AuthorizedView. + + Args: + request (Optional[Union[google.cloud.bigtable_admin_v2.types.DeleteAuthorizedViewRequest, dict]]): + The request object. Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.DeleteAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.DeleteAuthorizedView] + name (:class:`str`): + Required. The unique name of the AuthorizedView to be + deleted. Values are of the form + ``projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, bigtable_table_admin.DeleteAuthorizedViewRequest): + request = bigtable_table_admin.DeleteAuthorizedViewRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_authorized_view + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def modify_column_families( self, request: Optional[ @@ -1050,8 +1538,8 @@ async def modify_column_families( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, modifications]) if request is not None and has_flattened_params: raise ValueError( @@ -1059,7 +1547,10 @@ async def modify_column_families( "the individual field arguments should be set." ) - request = bigtable_table_admin.ModifyColumnFamiliesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.ModifyColumnFamiliesRequest): + request = bigtable_table_admin.ModifyColumnFamiliesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1070,11 +1561,9 @@ async def modify_column_families( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.modify_column_families, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.modify_column_families + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1120,15 +1609,16 @@ async def drop_row_range( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = bigtable_table_admin.DropRowRangeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.DropRowRangeRequest): + request = bigtable_table_admin.DropRowRangeRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.drop_row_range, - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.drop_row_range + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1189,8 +1679,8 @@ async def generate_consistency_token( """ # Create or coerce a protobuf request object. 
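# Illustrative usage sketch for the new async AuthorizedView methods above; a minimal
# example assuming application-default credentials, placeholder resource ids, and that
# ``subset_view`` is a valid update-mask path on AuthorizedView.
import asyncio

from google.cloud import bigtable_admin_v2
from google.protobuf import field_mask_pb2


async def inspect_authorized_views() -> None:
    client = bigtable_admin_v2.BigtableTableAdminAsyncClient()
    parent = "projects/my-project/instances/my-instance/tables/my-table"

    # list_authorized_views returns an async pager; iterating it resolves
    # additional pages automatically.
    pager = await client.list_authorized_views(parent=parent)
    async for view in pager:
        print(view.name)

    # get_authorized_view takes the fully-qualified AuthorizedView name.
    view = await client.get_authorized_view(
        name=parent + "/authorizedViews/my-view"
    )

    # update_authorized_view returns a long-running operation whose result is
    # the updated AuthorizedView.
    operation = await client.update_authorized_view(
        authorized_view=view,
        update_mask=field_mask_pb2.FieldMask(paths=["subset_view"]),
    )
    updated = await operation.result()
    print(updated.name)


asyncio.run(inspect_authorized_views())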
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1198,7 +1688,12 @@ async def generate_consistency_token( "the individual field arguments should be set." ) - request = bigtable_table_admin.GenerateConsistencyTokenRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, bigtable_table_admin.GenerateConsistencyTokenRequest + ): + request = bigtable_table_admin.GenerateConsistencyTokenRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1207,21 +1702,9 @@ async def generate_consistency_token( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.generate_consistency_token, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_consistency_token + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1292,8 +1775,8 @@ async def check_consistency( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, consistency_token]) if request is not None and has_flattened_params: raise ValueError( @@ -1301,7 +1784,10 @@ async def check_consistency( "the individual field arguments should be set." ) - request = bigtable_table_admin.CheckConsistencyRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.CheckConsistencyRequest): + request = bigtable_table_admin.CheckConsistencyRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1312,21 +1798,9 @@ async def check_consistency( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.check_consistency, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.check_consistency + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1438,8 +1912,8 @@ async def snapshot_table( """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, cluster, snapshot_id, description]) if request is not None and has_flattened_params: raise ValueError( @@ -1447,7 +1921,10 @@ async def snapshot_table( "the individual field arguments should be set." ) - request = bigtable_table_admin.SnapshotTableRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.SnapshotTableRequest): + request = bigtable_table_admin.SnapshotTableRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1462,11 +1939,9 @@ async def snapshot_table( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.snapshot_table, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.snapshot_table + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1557,8 +2032,8 @@ async def get_snapshot( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1566,7 +2041,10 @@ async def get_snapshot( "the individual field arguments should be set." ) - request = bigtable_table_admin.GetSnapshotRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.GetSnapshotRequest): + request = bigtable_table_admin.GetSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1575,21 +2053,9 @@ async def get_snapshot( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_snapshot, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_snapshot + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1676,8 +2142,8 @@ async def list_snapshots( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1685,7 +2151,10 @@ async def list_snapshots( "the individual field arguments should be set." ) - request = bigtable_table_admin.ListSnapshotsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.ListSnapshotsRequest): + request = bigtable_table_admin.ListSnapshotsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1694,21 +2163,9 @@ async def list_snapshots( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_snapshots, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_snapshots + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1785,8 +2242,8 @@ async def delete_snapshot( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1794,7 +2251,10 @@ async def delete_snapshot( "the individual field arguments should be set." ) - request = bigtable_table_admin.DeleteSnapshotRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.DeleteSnapshotRequest): + request = bigtable_table_admin.DeleteSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1803,11 +2263,9 @@ async def delete_snapshot( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_snapshot, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_snapshot + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1893,8 +2351,8 @@ async def create_backup( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, backup_id, backup]) if request is not None and has_flattened_params: raise ValueError( @@ -1902,7 +2360,10 @@ async def create_backup( "the individual field arguments should be set." 
) - request = bigtable_table_admin.CreateBackupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.CreateBackupRequest): + request = bigtable_table_admin.CreateBackupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1915,11 +2376,9 @@ async def create_backup( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_backup, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1983,8 +2442,8 @@ async def get_backup( A backup of a Cloud Bigtable table. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1992,7 +2451,10 @@ async def get_backup( "the individual field arguments should be set." ) - request = bigtable_table_admin.GetBackupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.GetBackupRequest): + request = bigtable_table_admin.GetBackupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2001,21 +2463,9 @@ async def get_backup( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_backup, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2087,8 +2537,8 @@ async def update_backup( A backup of a Cloud Bigtable table. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([backup, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2096,7 +2546,10 @@ async def update_backup( "the individual field arguments should be set." ) - request = bigtable_table_admin.UpdateBackupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, bigtable_table_admin.UpdateBackupRequest): + request = bigtable_table_admin.UpdateBackupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2107,11 +2560,9 @@ async def update_backup( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_backup, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_backup + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2165,8 +2616,8 @@ async def delete_backup( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2174,7 +2625,10 @@ async def delete_backup( "the individual field arguments should be set." ) - request = bigtable_table_admin.DeleteBackupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.DeleteBackupRequest): + request = bigtable_table_admin.DeleteBackupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2183,11 +2637,9 @@ async def delete_backup( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_backup, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2249,8 +2701,8 @@ async def list_backups( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2258,7 +2710,10 @@ async def list_backups( "the individual field arguments should be set." ) - request = bigtable_table_admin.ListBackupsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.ListBackupsRequest): + request = bigtable_table_admin.ListBackupsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2267,21 +2722,9 @@ async def list_backups( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_backups, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backups + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2349,15 +2792,16 @@ async def restore_table( """ # Create or coerce a protobuf request object. - request = bigtable_table_admin.RestoreTableRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.RestoreTableRequest): + request = bigtable_table_admin.RestoreTableRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.restore_table, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.restore_table + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2467,8 +2911,8 @@ async def copy_backup( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, backup_id, source_backup, expire_time]) if request is not None and has_flattened_params: raise ValueError( @@ -2476,7 +2920,10 @@ async def copy_backup( "the individual field arguments should be set." ) - request = bigtable_table_admin.CopyBackupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.CopyBackupRequest): + request = bigtable_table_admin.CopyBackupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2491,11 +2938,9 @@ async def copy_backup( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.copy_backup, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.copy_backup + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2591,8 +3036,8 @@ async def get_iam_policy( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2600,32 +3045,18 @@ async def get_iam_policy( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
+ # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. if isinstance(request, dict): request = iam_policy_pb2.GetIamPolicyRequest(**request) elif not request: - request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, - ) + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_iam_policy + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2712,8 +3143,8 @@ async def set_iam_policy( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2721,22 +3152,18 @@ async def set_iam_policy( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. if isinstance(request, dict): request = iam_policy_pb2.SetIamPolicyRequest(**request) elif not request: - request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, - ) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.set_iam_policy + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2803,8 +3230,8 @@ async def test_iam_permissions( Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource, permissions]) if request is not None and has_flattened_params: raise ValueError( @@ -2812,33 +3239,20 @@ async def test_iam_permissions( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
if isinstance(request, dict): request = iam_policy_pb2.TestIamPermissionsRequest(**request) elif not request: request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, - permissions=permissions, + resource=resource, permissions=permissions ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=60.0, - multiplier=2, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py index 09a67e696..e9b06965c 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -194,6 +195,30 @@ def transport(self) -> BigtableTableAdminTransport: """ return self._transport + @staticmethod + def authorized_view_path( + project: str, + instance: str, + table: str, + authorized_view: str, + ) -> str: + """Returns a fully-qualified authorized_view string.""" + return "projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}".format( + project=project, + instance=instance, + table=table, + authorized_view=authorized_view, + ) + + @staticmethod + def parse_authorized_view_path(path: str) -> Dict[str, str]: + """Parses an authorized_view path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/tables/(?P<table>.+?)/authorizedViews/(?P<authorized_view>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def backup_path( project: str, @@ -654,7 +679,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BigtableTableAdminTransport]] = None, + transport: Optional[ + Union[ + str, + BigtableTableAdminTransport, + Callable[..., BigtableTableAdminTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -666,9 +697,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, BigtableTableAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,BigtableTableAdminTransport,Callable[..., BigtableTableAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BigtableTableAdminTransport constructor.
+ If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -777,8 +810,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[BigtableTableAdminTransport], + Callable[..., BigtableTableAdminTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BigtableTableAdminTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -846,8 +887,8 @@ def create_table( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, table_id, table]) if request is not None and has_flattened_params: raise ValueError( @@ -855,10 +896,8 @@ def create_table( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.CreateTableRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.CreateTableRequest): request = bigtable_table_admin.CreateTableRequest(request) # If we have keyword arguments corresponding to fields on the @@ -970,8 +1009,8 @@ def create_table_from_snapshot( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, table_id, source_snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -979,10 +1018,8 @@ def create_table_from_snapshot( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.CreateTableFromSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.CreateTableFromSnapshotRequest): request = bigtable_table_admin.CreateTableFromSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1067,8 +1104,8 @@ def list_tables( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
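# A minimal sketch of the widened ``transport`` argument documented above: besides a
# string or a transport instance, a callable that builds the transport is accepted.
# Passing the gRPC transport class itself is the simplest such callable; the import
# path follows the generated layout and is an assumption here.
from google.cloud.bigtable_admin_v2 import BigtableTableAdminClient
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports import (
    BigtableTableAdminGrpcTransport,
)

# The callable is invoked with the same keyword arguments the client would pass to
# the BigtableTableAdminTransport constructor (credentials, host, scopes, ...).
client = BigtableTableAdminClient(transport=BigtableTableAdminGrpcTransport)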
has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1076,10 +1113,8 @@ def list_tables( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.ListTablesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.ListTablesRequest): request = bigtable_table_admin.ListTablesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1158,8 +1193,8 @@ def get_table( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1167,10 +1202,8 @@ def get_table( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.GetTableRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.GetTableRequest): request = bigtable_table_admin.GetTableRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1260,8 +1293,8 @@ def update_table( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1269,10 +1302,8 @@ def update_table( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.UpdateTableRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.UpdateTableRequest): request = bigtable_table_admin.UpdateTableRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1347,8 +1378,8 @@ def delete_table( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1356,10 +1387,8 @@ def delete_table( "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.DeleteTableRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.DeleteTableRequest): request = bigtable_table_admin.DeleteTableRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1430,8 +1459,8 @@ def undelete_table( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1439,10 +1468,8 @@ def undelete_table( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.UndeleteTableRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.UndeleteTableRequest): request = bigtable_table_admin.UndeleteTableRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1482,6 +1509,479 @@ def undelete_table( # Done; return the response. return response + def create_authorized_view( + self, + request: Optional[ + Union[bigtable_table_admin.CreateAuthorizedViewRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + authorized_view: Optional[table.AuthorizedView] = None, + authorized_view_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new AuthorizedView in a table. + + Args: + request (Union[google.cloud.bigtable_admin_v2.types.CreateAuthorizedViewRequest, dict]): + The request object. The request for + [CreateAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.CreateAuthorizedView] + parent (str): + Required. This is the name of the table the + AuthorizedView belongs to. Values are of the form + ``projects/{project}/instances/{instance}/tables/{table}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + authorized_view (google.cloud.bigtable_admin_v2.types.AuthorizedView): + Required. The AuthorizedView to + create. + + This corresponds to the ``authorized_view`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + authorized_view_id (str): + Required. The id of the AuthorizedView to create. This + AuthorizedView must not already exist. The + ``authorized_view_id`` appended to ``parent`` forms the + full AuthorizedView name of the form + ``projects/{project}/instances/{instance}/tables/{table}/authorizedView/{authorized_view}``. + + This corresponds to the ``authorized_view_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.bigtable_admin_v2.types.AuthorizedView` AuthorizedViews represent subsets of a particular Cloud Bigtable table. Users + can configure access to each Authorized View + independently from the table and use the existing + Data APIs to access the subset of data. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, authorized_view, authorized_view_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.CreateAuthorizedViewRequest): + request = bigtable_table_admin.CreateAuthorizedViewRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if authorized_view is not None: + request.authorized_view = authorized_view + if authorized_view_id is not None: + request.authorized_view_id = authorized_view_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_authorized_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + table.AuthorizedView, + metadata_type=bigtable_table_admin.CreateAuthorizedViewMetadata, + ) + + # Done; return the response. + return response + + def list_authorized_views( + self, + request: Optional[ + Union[bigtable_table_admin.ListAuthorizedViewsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAuthorizedViewsPager: + r"""Lists all AuthorizedViews from a specific table. + + Args: + request (Union[google.cloud.bigtable_admin_v2.types.ListAuthorizedViewsRequest, dict]): + The request object. Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews][google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews] + parent (str): + Required. The unique name of the table for which + AuthorizedViews should be listed. Values are of the form + ``projects/{project}/instances/{instance}/tables/{table}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigtable_admin_v2.services.bigtable_table_admin.pagers.ListAuthorizedViewsPager: + Response message for + [google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews][google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews] + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.ListAuthorizedViewsRequest): + request = bigtable_table_admin.ListAuthorizedViewsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_authorized_views] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAuthorizedViewsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_authorized_view( + self, + request: Optional[ + Union[bigtable_table_admin.GetAuthorizedViewRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> table.AuthorizedView: + r"""Gets information from a specified AuthorizedView. + + Args: + request (Union[google.cloud.bigtable_admin_v2.types.GetAuthorizedViewRequest, dict]): + The request object. Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.GetAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.GetAuthorizedView] + name (str): + Required. The unique name of the requested + AuthorizedView. Values are of the form + ``projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigtable_admin_v2.types.AuthorizedView: + AuthorizedViews represent subsets of + a particular Cloud Bigtable table. Users + can configure access to each Authorized + View independently from the table and + use the existing Data APIs to access the + subset of data. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.GetAuthorizedViewRequest): + request = bigtable_table_admin.GetAuthorizedViewRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_authorized_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_authorized_view( + self, + request: Optional[ + Union[bigtable_table_admin.UpdateAuthorizedViewRequest, dict] + ] = None, + *, + authorized_view: Optional[table.AuthorizedView] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an AuthorizedView in a table. + + Args: + request (Union[google.cloud.bigtable_admin_v2.types.UpdateAuthorizedViewRequest, dict]): + The request object. The request for + [UpdateAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.UpdateAuthorizedView]. + authorized_view (google.cloud.bigtable_admin_v2.types.AuthorizedView): + Required. The AuthorizedView to update. The ``name`` in + ``authorized_view`` is used to identify the + AuthorizedView. AuthorizedView name must be in this format: + ``projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}``. + + This corresponds to the ``authorized_view`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. A mask + specifying which fields in the AuthorizedView resource + should be updated. This mask is relative to the + AuthorizedView resource, not to the request message. A + field will be overwritten if it is in the mask. If + empty, all fields set in the request will be + overwritten. A special value ``*`` means to overwrite + all fields (including fields not set in the request).
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.bigtable_admin_v2.types.AuthorizedView` AuthorizedViews represent subsets of a particular Cloud Bigtable table. Users + can configure access to each Authorized View + independently from the table and use the existing + Data APIs to access the subset of data. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([authorized_view, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.UpdateAuthorizedViewRequest): + request = bigtable_table_admin.UpdateAuthorizedViewRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if authorized_view is not None: + request.authorized_view = authorized_view + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_authorized_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("authorized_view.name", request.authorized_view.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + table.AuthorizedView, + metadata_type=bigtable_table_admin.UpdateAuthorizedViewMetadata, + ) + + # Done; return the response. + return response + + def delete_authorized_view( + self, + request: Optional[ + Union[bigtable_table_admin.DeleteAuthorizedViewRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Permanently deletes a specified AuthorizedView. + + Args: + request (Union[google.cloud.bigtable_admin_v2.types.DeleteAuthorizedViewRequest, dict]): + The request object. Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.DeleteAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.DeleteAuthorizedView] + name (str): + Required. The unique name of the AuthorizedView to be + deleted. Values are of the form + ``projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, bigtable_table_admin.DeleteAuthorizedViewRequest): + request = bigtable_table_admin.DeleteAuthorizedViewRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_authorized_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + def modify_column_families( self, request: Optional[ @@ -1543,8 +2043,8 @@ def modify_column_families( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, modifications]) if request is not None and has_flattened_params: raise ValueError( @@ -1552,10 +2052,8 @@ def modify_column_families( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.ModifyColumnFamiliesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.ModifyColumnFamiliesRequest): request = bigtable_table_admin.ModifyColumnFamiliesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1613,10 +2111,8 @@ def drop_row_range( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.DropRowRangeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, bigtable_table_admin.DropRowRangeRequest): request = bigtable_table_admin.DropRowRangeRequest(request) @@ -1683,8 +2179,8 @@ def generate_consistency_token( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1692,10 +2188,8 @@ def generate_consistency_token( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.GenerateConsistencyTokenRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, bigtable_table_admin.GenerateConsistencyTokenRequest ): @@ -1780,8 +2274,8 @@ def check_consistency( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, consistency_token]) if request is not None and has_flattened_params: raise ValueError( @@ -1789,10 +2283,8 @@ def check_consistency( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.CheckConsistencyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.CheckConsistencyRequest): request = bigtable_table_admin.CheckConsistencyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1916,8 +2408,8 @@ def snapshot_table( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, cluster, snapshot_id, description]) if request is not None and has_flattened_params: raise ValueError( @@ -1925,10 +2417,8 @@ def snapshot_table( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.SnapshotTableRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.SnapshotTableRequest): request = bigtable_table_admin.SnapshotTableRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2035,8 +2525,8 @@ def get_snapshot( """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2044,10 +2534,8 @@ def get_snapshot( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.GetSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.GetSnapshotRequest): request = bigtable_table_admin.GetSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2144,8 +2632,8 @@ def list_snapshots( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2153,10 +2641,8 @@ def list_snapshots( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.ListSnapshotsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.ListSnapshotsRequest): request = bigtable_table_admin.ListSnapshotsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2243,8 +2729,8 @@ def delete_snapshot( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2252,10 +2738,8 @@ def delete_snapshot( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.DeleteSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.DeleteSnapshotRequest): request = bigtable_table_admin.DeleteSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2351,8 +2835,8 @@ def create_backup( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent, backup_id, backup]) if request is not None and has_flattened_params: raise ValueError( @@ -2360,10 +2844,8 @@ def create_backup( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.CreateBackupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.CreateBackupRequest): request = bigtable_table_admin.CreateBackupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2441,8 +2923,8 @@ def get_backup( A backup of a Cloud Bigtable table. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2450,10 +2932,8 @@ def get_backup( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.GetBackupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.GetBackupRequest): request = bigtable_table_admin.GetBackupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2535,8 +3015,8 @@ def update_backup( A backup of a Cloud Bigtable table. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([backup, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2544,10 +3024,8 @@ def update_backup( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.UpdateBackupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.UpdateBackupRequest): request = bigtable_table_admin.UpdateBackupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2613,8 +3091,8 @@ def delete_backup( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2622,10 +3100,8 @@ def delete_backup( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.DeleteBackupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.DeleteBackupRequest): request = bigtable_table_admin.DeleteBackupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2697,8 +3173,8 @@ def list_backups( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2706,10 +3182,8 @@ def list_backups( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.ListBackupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.ListBackupsRequest): request = bigtable_table_admin.ListBackupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2787,10 +3261,8 @@ def restore_table( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.RestoreTableRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable_table_admin.RestoreTableRequest): request = bigtable_table_admin.RestoreTableRequest(request) @@ -2906,8 +3378,8 @@ def copy_backup( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, backup_id, source_backup, expire_time]) if request is not None and has_flattened_params: raise ValueError( @@ -2915,10 +3387,8 @@ def copy_backup( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable_table_admin.CopyBackupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, bigtable_table_admin.CopyBackupRequest): request = bigtable_table_admin.CopyBackupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3030,8 +3500,8 @@ def get_iam_policy( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3040,8 +3510,8 @@ def get_iam_policy( ) if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy_pb2.GetIamPolicyRequest(**request) elif not request: # Null request, just make one. @@ -3138,8 +3608,8 @@ def set_iam_policy( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3148,8 +3618,8 @@ def set_iam_policy( ) if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy_pb2.SetIamPolicyRequest(**request) elif not request: # Null request, just make one. @@ -3226,8 +3696,8 @@ def test_iam_permissions( Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource, permissions]) if request is not None and has_flattened_params: raise ValueError( @@ -3236,8 +3706,8 @@ def test_iam_permissions( ) if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy_pb2.TestIamPermissionsRequest(**request) elif not request: # Null request, just make one. diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/pagers.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/pagers.py index 331647b4c..d6277bce2 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/pagers.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
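For orientation before the diff moves on to the pagers: a minimal usage sketch (not part of the patch) of the new AuthorizedView methods added to BigtableTableAdminClient above. The project, instance, table, and view names are placeholders, the deletion_protection field used in the mask is assumed for illustration, and error handling plus credential setup are omitted.

from google.cloud import bigtable_admin_v2
from google.protobuf import field_mask_pb2

client = bigtable_admin_v2.BigtableTableAdminClient()

view_name = (
    "projects/my-project/instances/my-instance"
    "/tables/my-table/authorizedViews/my-view"
)

# Fetch a single AuthorizedView.
view = client.get_authorized_view(name=view_name)

# Update it through the long-running operation surface; only the fields
# named in the mask are overwritten. ``deletion_protection`` is assumed
# here purely for illustration.
view.deletion_protection = True
operation = client.update_authorized_view(
    authorized_view=view,
    update_mask=field_mask_pb2.FieldMask(paths=["deletion_protection"]),
)
updated_view = operation.result()

# Permanently delete the view; the RPC returns no payload.
client.delete_authorized_view(name=view_name)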
@@ -156,6 +156,138 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListAuthorizedViewsPager: + """A pager for iterating through ``list_authorized_views`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigtable_admin_v2.types.ListAuthorizedViewsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``authorized_views`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAuthorizedViews`` requests and continue to iterate + through the ``authorized_views`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigtable_admin_v2.types.ListAuthorizedViewsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., bigtable_table_admin.ListAuthorizedViewsResponse], + request: bigtable_table_admin.ListAuthorizedViewsRequest, + response: bigtable_table_admin.ListAuthorizedViewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigtable_admin_v2.types.ListAuthorizedViewsRequest): + The initial request object. + response (google.cloud.bigtable_admin_v2.types.ListAuthorizedViewsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = bigtable_table_admin.ListAuthorizedViewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[bigtable_table_admin.ListAuthorizedViewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[table.AuthorizedView]: + for page in self.pages: + yield from page.authorized_views + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAuthorizedViewsAsyncPager: + """A pager for iterating through ``list_authorized_views`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigtable_admin_v2.types.ListAuthorizedViewsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``authorized_views`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAuthorizedViews`` requests and continue to iterate + through the ``authorized_views`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigtable_admin_v2.types.ListAuthorizedViewsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[bigtable_table_admin.ListAuthorizedViewsResponse] + ], + request: bigtable_table_admin.ListAuthorizedViewsRequest, + response: bigtable_table_admin.ListAuthorizedViewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigtable_admin_v2.types.ListAuthorizedViewsRequest): + The initial request object. + response (google.cloud.bigtable_admin_v2.types.ListAuthorizedViewsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = bigtable_table_admin.ListAuthorizedViewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[bigtable_table_admin.ListAuthorizedViewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[table.AuthorizedView]: + async def async_generator(): + async for page in self.pages: + for response in page.authorized_views: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListSnapshotsPager: """A pager for iterating through ``list_snapshots`` requests. diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/__init__.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/__init__.py index be4aa8d2a..11a7f8329 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/__init__.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py index e0313a946..1ec3be85e 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
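The two ListAuthorizedViews pagers above follow the usual GAPIC pattern; a short sketch of how they might be consumed, assuming the corresponding client.list_authorized_views method accepts the flattened parent argument and using a placeholder table name:

from google.cloud import bigtable_admin_v2

client = bigtable_admin_v2.BigtableTableAdminClient()
table_name = "projects/my-project/instances/my-instance/tables/my-table"

# Element-wise iteration: the pager transparently issues follow-up requests
# whenever next_page_token is set on the previous response.
for view in client.list_authorized_views(parent=table_name):
    print(view.name)

# Page-wise iteration is also available through the ``pages`` property.
pager = client.list_authorized_views(parent=table_name)
for page in pager.pages:
    print(len(page.authorized_views))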
@@ -188,7 +188,7 @@ def _prep_wrapped_messages(self, client_info): ), self.delete_table: gapic_v1.method.wrap_method( self.delete_table, - default_timeout=60.0, + default_timeout=300.0, client_info=client_info, ), self.undelete_table: gapic_v1.method.wrap_method( @@ -196,6 +196,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_authorized_view: gapic_v1.method.wrap_method( + self.create_authorized_view, + default_timeout=None, + client_info=client_info, + ), + self.list_authorized_views: gapic_v1.method.wrap_method( + self.list_authorized_views, + default_timeout=None, + client_info=client_info, + ), + self.get_authorized_view: gapic_v1.method.wrap_method( + self.get_authorized_view, + default_timeout=None, + client_info=client_info, + ), + self.update_authorized_view: gapic_v1.method.wrap_method( + self.update_authorized_view, + default_timeout=None, + client_info=client_info, + ), + self.delete_authorized_view: gapic_v1.method.wrap_method( + self.delete_authorized_view, + default_timeout=None, + client_info=client_info, + ), self.modify_column_families: gapic_v1.method.wrap_method( self.modify_column_families, default_timeout=300.0, @@ -273,7 +298,7 @@ def _prep_wrapped_messages(self, client_info): ), self.delete_snapshot: gapic_v1.method.wrap_method( self.delete_snapshot, - default_timeout=60.0, + default_timeout=300.0, client_info=client_info, ), self.create_backup: gapic_v1.method.wrap_method( @@ -303,7 +328,7 @@ def _prep_wrapped_messages(self, client_info): ), self.delete_backup: gapic_v1.method.wrap_method( self.delete_backup, - default_timeout=60.0, + default_timeout=300.0, client_info=client_info, ), self.list_backups: gapic_v1.method.wrap_method( @@ -448,6 +473,54 @@ def undelete_table( ]: raise NotImplementedError() + @property + def create_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.CreateAuthorizedViewRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_authorized_views( + self, + ) -> Callable[ + [bigtable_table_admin.ListAuthorizedViewsRequest], + Union[ + bigtable_table_admin.ListAuthorizedViewsResponse, + Awaitable[bigtable_table_admin.ListAuthorizedViewsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.GetAuthorizedViewRequest], + Union[table.AuthorizedView, Awaitable[table.AuthorizedView]], + ]: + raise NotImplementedError() + + @property + def update_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.UpdateAuthorizedViewRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.DeleteAuthorizedViewRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + @property def modify_column_families( self, diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py index b0c33eca9..01cec4e0b 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache 
License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -61,7 +61,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -81,14 +81,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -98,11 +101,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -129,7 +132,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
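A sketch of what the widened channel parameter allows: passing either a ready grpc.Channel (in which case credentials are ignored) or, as below, a callable that the transport invokes in place of create_channel. The factory and its insecure, emulator-style channel are illustrative assumptions, and application default credentials still need to be resolvable because no Channel instance is passed.

import grpc

from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
    BigtableTableAdminClient,
)
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports import (
    BigtableTableAdminGrpcTransport,
)


def channel_factory(host, **kwargs):
    # Invoked instead of ``create_channel`` with the same keyword arguments
    # (credentials, ssl_credentials, options, ...); they are ignored here,
    # which only makes sense for something like a local emulator endpoint.
    return grpc.insecure_channel(host)


transport = BigtableTableAdminGrpcTransport(channel=channel_factory)
client = BigtableTableAdminClient(transport=transport)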
@@ -170,7 +173,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -457,6 +462,145 @@ def undelete_table( ) return self._stubs["undelete_table"] + @property + def create_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.CreateAuthorizedViewRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create authorized view method over gRPC. + + Creates a new AuthorizedView in a table. + + Returns: + Callable[[~.CreateAuthorizedViewRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_authorized_view" not in self._stubs: + self._stubs["create_authorized_view"] = self.grpc_channel.unary_unary( + "/google.bigtable.admin.v2.BigtableTableAdmin/CreateAuthorizedView", + request_serializer=bigtable_table_admin.CreateAuthorizedViewRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_authorized_view"] + + @property + def list_authorized_views( + self, + ) -> Callable[ + [bigtable_table_admin.ListAuthorizedViewsRequest], + bigtable_table_admin.ListAuthorizedViewsResponse, + ]: + r"""Return a callable for the list authorized views method over gRPC. + + Lists all AuthorizedViews from a specific table. + + Returns: + Callable[[~.ListAuthorizedViewsRequest], + ~.ListAuthorizedViewsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_authorized_views" not in self._stubs: + self._stubs["list_authorized_views"] = self.grpc_channel.unary_unary( + "/google.bigtable.admin.v2.BigtableTableAdmin/ListAuthorizedViews", + request_serializer=bigtable_table_admin.ListAuthorizedViewsRequest.serialize, + response_deserializer=bigtable_table_admin.ListAuthorizedViewsResponse.deserialize, + ) + return self._stubs["list_authorized_views"] + + @property + def get_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.GetAuthorizedViewRequest], table.AuthorizedView + ]: + r"""Return a callable for the get authorized view method over gRPC. + + Gets information from a specified AuthorizedView. + + Returns: + Callable[[~.GetAuthorizedViewRequest], + ~.AuthorizedView]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_authorized_view" not in self._stubs: + self._stubs["get_authorized_view"] = self.grpc_channel.unary_unary( + "/google.bigtable.admin.v2.BigtableTableAdmin/GetAuthorizedView", + request_serializer=bigtable_table_admin.GetAuthorizedViewRequest.serialize, + response_deserializer=table.AuthorizedView.deserialize, + ) + return self._stubs["get_authorized_view"] + + @property + def update_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.UpdateAuthorizedViewRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update authorized view method over gRPC. + + Updates an AuthorizedView in a table. + + Returns: + Callable[[~.UpdateAuthorizedViewRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_authorized_view" not in self._stubs: + self._stubs["update_authorized_view"] = self.grpc_channel.unary_unary( + "/google.bigtable.admin.v2.BigtableTableAdmin/UpdateAuthorizedView", + request_serializer=bigtable_table_admin.UpdateAuthorizedViewRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_authorized_view"] + + @property + def delete_authorized_view( + self, + ) -> Callable[[bigtable_table_admin.DeleteAuthorizedViewRequest], empty_pb2.Empty]: + r"""Return a callable for the delete authorized view method over gRPC. + + Permanently deletes a specified AuthorizedView. + + Returns: + Callable[[~.DeleteAuthorizedViewRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_authorized_view" not in self._stubs: + self._stubs["delete_authorized_view"] = self.grpc_channel.unary_unary( + "/google.bigtable.admin.v2.BigtableTableAdmin/DeleteAuthorizedView", + request_serializer=bigtable_table_admin.DeleteAuthorizedViewRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_authorized_view"] + @property def modify_column_families( self, diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py index 3ae66f84f..f20ed0a49 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -76,7 +78,6 @@ def create_channel( the credentials from the environment. 
credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -106,7 +107,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -126,15 +127,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -144,11 +148,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -175,7 +179,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
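The asyncio transport gets the same widening; a brief sketch under the same assumptions, also showing how the ListAuthorizedViewsAsyncPager added earlier would typically be consumed:

import asyncio

from grpc import aio

from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
    BigtableTableAdminAsyncClient,
)
from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports import (
    BigtableTableAdminGrpcAsyncIOTransport,
)


def aio_channel_factory(host, **kwargs):
    # Must return an ``aio.Channel``; the forwarded keyword arguments are
    # ignored here, which again only suits an emulator-style endpoint.
    return aio.insecure_channel(host)


async def main():
    transport = BigtableTableAdminGrpcAsyncIOTransport(channel=aio_channel_factory)
    client = BigtableTableAdminAsyncClient(transport=transport)
    parent = "projects/my-project/instances/my-instance/tables/my-table"
    pager = await client.list_authorized_views(parent=parent)
    async for view in pager:
        print(view.name)


asyncio.run(main())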
@@ -215,7 +219,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -469,6 +475,149 @@ def undelete_table( ) return self._stubs["undelete_table"] + @property + def create_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.CreateAuthorizedViewRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create authorized view method over gRPC. + + Creates a new AuthorizedView in a table. + + Returns: + Callable[[~.CreateAuthorizedViewRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_authorized_view" not in self._stubs: + self._stubs["create_authorized_view"] = self.grpc_channel.unary_unary( + "/google.bigtable.admin.v2.BigtableTableAdmin/CreateAuthorizedView", + request_serializer=bigtable_table_admin.CreateAuthorizedViewRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_authorized_view"] + + @property + def list_authorized_views( + self, + ) -> Callable[ + [bigtable_table_admin.ListAuthorizedViewsRequest], + Awaitable[bigtable_table_admin.ListAuthorizedViewsResponse], + ]: + r"""Return a callable for the list authorized views method over gRPC. + + Lists all AuthorizedViews from a specific table. + + Returns: + Callable[[~.ListAuthorizedViewsRequest], + Awaitable[~.ListAuthorizedViewsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_authorized_views" not in self._stubs: + self._stubs["list_authorized_views"] = self.grpc_channel.unary_unary( + "/google.bigtable.admin.v2.BigtableTableAdmin/ListAuthorizedViews", + request_serializer=bigtable_table_admin.ListAuthorizedViewsRequest.serialize, + response_deserializer=bigtable_table_admin.ListAuthorizedViewsResponse.deserialize, + ) + return self._stubs["list_authorized_views"] + + @property + def get_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.GetAuthorizedViewRequest], Awaitable[table.AuthorizedView] + ]: + r"""Return a callable for the get authorized view method over gRPC. + + Gets information from a specified AuthorizedView. + + Returns: + Callable[[~.GetAuthorizedViewRequest], + Awaitable[~.AuthorizedView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_authorized_view" not in self._stubs: + self._stubs["get_authorized_view"] = self.grpc_channel.unary_unary( + "/google.bigtable.admin.v2.BigtableTableAdmin/GetAuthorizedView", + request_serializer=bigtable_table_admin.GetAuthorizedViewRequest.serialize, + response_deserializer=table.AuthorizedView.deserialize, + ) + return self._stubs["get_authorized_view"] + + @property + def update_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.UpdateAuthorizedViewRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update authorized view method over gRPC. + + Updates an AuthorizedView in a table. + + Returns: + Callable[[~.UpdateAuthorizedViewRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_authorized_view" not in self._stubs: + self._stubs["update_authorized_view"] = self.grpc_channel.unary_unary( + "/google.bigtable.admin.v2.BigtableTableAdmin/UpdateAuthorizedView", + request_serializer=bigtable_table_admin.UpdateAuthorizedViewRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_authorized_view"] + + @property + def delete_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.DeleteAuthorizedViewRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete authorized view method over gRPC. + + Permanently deletes a specified AuthorizedView. + + Returns: + Callable[[~.DeleteAuthorizedViewRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_authorized_view" not in self._stubs: + self._stubs["delete_authorized_view"] = self.grpc_channel.unary_unary( + "/google.bigtable.admin.v2.BigtableTableAdmin/DeleteAuthorizedView", + request_serializer=bigtable_table_admin.DeleteAuthorizedViewRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_authorized_view"] + @property def modify_column_families( self, @@ -1035,6 +1184,261 @@ def test_iam_permissions( ) return self._stubs["test_iam_permissions"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_table: gapic_v1.method_async.wrap_method( + self.create_table, + default_timeout=300.0, + client_info=client_info, + ), + self.create_table_from_snapshot: gapic_v1.method_async.wrap_method( + self.create_table_from_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.list_tables: gapic_v1.method_async.wrap_method( + self.list_tables, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_table: gapic_v1.method_async.wrap_method( + self.get_table, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_table: gapic_v1.method_async.wrap_method( + self.update_table, + default_timeout=None, + client_info=client_info, + ), + self.delete_table: gapic_v1.method_async.wrap_method( + self.delete_table, + default_timeout=300.0, + client_info=client_info, + ), + self.undelete_table: gapic_v1.method_async.wrap_method( + self.undelete_table, + default_timeout=None, + client_info=client_info, + ), + self.create_authorized_view: gapic_v1.method_async.wrap_method( + self.create_authorized_view, + default_timeout=None, + client_info=client_info, + ), + self.list_authorized_views: gapic_v1.method_async.wrap_method( + self.list_authorized_views, + default_timeout=None, + client_info=client_info, + ), + self.get_authorized_view: gapic_v1.method_async.wrap_method( + self.get_authorized_view, + default_timeout=None, + client_info=client_info, + ), + self.update_authorized_view: gapic_v1.method_async.wrap_method( + self.update_authorized_view, + default_timeout=None, + client_info=client_info, + ), + self.delete_authorized_view: gapic_v1.method_async.wrap_method( + self.delete_authorized_view, + default_timeout=None, + client_info=client_info, + ), + self.modify_column_families: gapic_v1.method_async.wrap_method( + self.modify_column_families, + default_timeout=300.0, + client_info=client_info, + ), + self.drop_row_range: gapic_v1.method_async.wrap_method( + self.drop_row_range, + default_timeout=3600.0, + client_info=client_info, + ), + self.generate_consistency_token: gapic_v1.method_async.wrap_method( + self.generate_consistency_token, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.check_consistency: 
gapic_v1.method_async.wrap_method( + self.check_consistency, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.snapshot_table: gapic_v1.method_async.wrap_method( + self.snapshot_table, + default_timeout=None, + client_info=client_info, + ), + self.get_snapshot: gapic_v1.method_async.wrap_method( + self.get_snapshot, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_snapshots: gapic_v1.method_async.wrap_method( + self.list_snapshots, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_snapshot: gapic_v1.method_async.wrap_method( + self.delete_snapshot, + default_timeout=300.0, + client_info=client_info, + ), + self.create_backup: gapic_v1.method_async.wrap_method( + self.create_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method_async.wrap_method( + self.get_backup, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method_async.wrap_method( + self.update_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method_async.wrap_method( + self.delete_backup, + default_timeout=300.0, + client_info=client_info, + ), + self.list_backups: gapic_v1.method_async.wrap_method( + self.list_backups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.restore_table: gapic_v1.method_async.wrap_method( + self.restore_table, + default_timeout=60.0, + client_info=client_info, + ), + self.copy_backup: gapic_v1.method_async.wrap_method( + self.copy_backup, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method_async.wrap_method( + self.get_iam_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method_async.wrap_method( + self.set_iam_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method_async.wrap_method( + self.test_iam_permissions, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + 
client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py index 49bc756e1..230b13a43 100644 --- a/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py +++ b/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py @@ -92,6 +92,14 @@ def post_copy_backup(self, response): logging.log(f"Received response: {response}") return response + def pre_create_authorized_view(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_authorized_view(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_backup(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -116,6 +124,10 @@ def post_create_table_from_snapshot(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_authorized_view(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_delete_backup(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -140,6 +152,14 @@ def post_generate_consistency_token(self, response): logging.log(f"Received response: {response}") return response + def pre_get_authorized_view(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_authorized_view(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_backup(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -172,6 +192,14 @@ def post_get_table(self, response): logging.log(f"Received response: {response}") return response + def pre_list_authorized_views(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_authorized_views(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_backups(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -244,6 +272,14 @@ def post_undelete_table(self, response): logging.log(f"Received response: {response}") return response + def pre_update_authorized_view(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_authorized_view(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_backup(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -312,6 +348,31 @@ def post_copy_backup( """ return response + def pre_create_authorized_view( + self, + request: bigtable_table_admin.CreateAuthorizedViewRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + bigtable_table_admin.CreateAuthorizedViewRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_authorized_view + + Override in a subclass to manipulate the request or metadata + before they are sent to the BigtableTableAdmin server. 
+ """ + return request, metadata + + def post_create_authorized_view( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_authorized_view + + Override in a subclass to manipulate the response + after it is returned by the BigtableTableAdmin server but before + it is returned to user code. + """ + return response + def pre_create_backup( self, request: bigtable_table_admin.CreateBackupRequest, @@ -381,6 +442,20 @@ def post_create_table_from_snapshot( """ return response + def pre_delete_authorized_view( + self, + request: bigtable_table_admin.DeleteAuthorizedViewRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + bigtable_table_admin.DeleteAuthorizedViewRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_authorized_view + + Override in a subclass to manipulate the request or metadata + before they are sent to the BigtableTableAdmin server. + """ + return request, metadata + def pre_delete_backup( self, request: bigtable_table_admin.DeleteBackupRequest, @@ -454,6 +529,31 @@ def post_generate_consistency_token( """ return response + def pre_get_authorized_view( + self, + request: bigtable_table_admin.GetAuthorizedViewRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + bigtable_table_admin.GetAuthorizedViewRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_authorized_view + + Override in a subclass to manipulate the request or metadata + before they are sent to the BigtableTableAdmin server. + """ + return request, metadata + + def post_get_authorized_view( + self, response: table.AuthorizedView + ) -> table.AuthorizedView: + """Post-rpc interceptor for get_authorized_view + + Override in a subclass to manipulate the response + after it is returned by the BigtableTableAdmin server but before + it is returned to user code. + """ + return response + def pre_get_backup( self, request: bigtable_table_admin.GetBackupRequest, @@ -538,6 +638,31 @@ def post_get_table(self, response: table.Table) -> table.Table: """ return response + def pre_list_authorized_views( + self, + request: bigtable_table_admin.ListAuthorizedViewsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + bigtable_table_admin.ListAuthorizedViewsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_authorized_views + + Override in a subclass to manipulate the request or metadata + before they are sent to the BigtableTableAdmin server. + """ + return request, metadata + + def post_list_authorized_views( + self, response: bigtable_table_admin.ListAuthorizedViewsResponse + ) -> bigtable_table_admin.ListAuthorizedViewsResponse: + """Post-rpc interceptor for list_authorized_views + + Override in a subclass to manipulate the response + after it is returned by the BigtableTableAdmin server but before + it is returned to user code. + """ + return response + def pre_list_backups( self, request: bigtable_table_admin.ListBackupsRequest, @@ -743,6 +868,31 @@ def post_undelete_table( """ return response + def pre_update_authorized_view( + self, + request: bigtable_table_admin.UpdateAuthorizedViewRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + bigtable_table_admin.UpdateAuthorizedViewRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_authorized_view + + Override in a subclass to manipulate the request or metadata + before they are sent to the BigtableTableAdmin server. 
+ """ + return request, metadata + + def post_update_authorized_view( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_authorized_view + + Override in a subclass to manipulate the response + after it is returned by the BigtableTableAdmin server but before + it is returned to user code. + """ + return response + def pre_update_backup( self, request: bigtable_table_admin.UpdateBackupRequest, @@ -1132,6 +1282,104 @@ def __call__( resp = self._interceptor.post_copy_backup(resp) return resp + class _CreateAuthorizedView(BigtableTableAdminRestStub): + def __hash__(self): + return hash("CreateAuthorizedView") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "authorizedViewId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: bigtable_table_admin.CreateAuthorizedViewRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create authorized view method over HTTP. + + Args: + request (~.bigtable_table_admin.CreateAuthorizedViewRequest): + The request object. The request for + [CreateAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.CreateAuthorizedView] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*/instances/*/tables/*}/authorizedViews", + "body": "authorized_view", + }, + ] + request, metadata = self._interceptor.pre_create_authorized_view( + request, metadata + ) + pb_request = bigtable_table_admin.CreateAuthorizedViewRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_authorized_view(resp) + return resp + class _CreateBackup(BigtableTableAdminRestStub): def __hash__(self): return hash("CreateBackup") @@ -1429,6 +1677,82 @@ def __call__( resp = self._interceptor.post_create_table_from_snapshot(resp) return resp + class _DeleteAuthorizedView(BigtableTableAdminRestStub): + def __hash__(self): + return hash("DeleteAuthorizedView") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: bigtable_table_admin.DeleteAuthorizedViewRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete authorized view method over HTTP. + + Args: + request (~.bigtable_table_admin.DeleteAuthorizedViewRequest): + The request object. Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.DeleteAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.DeleteAuthorizedView] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=projects/*/instances/*/tables/*/authorizedViews/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_authorized_view( + request, metadata + ) + pb_request = bigtable_table_admin.DeleteAuthorizedViewRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + class _DeleteBackup(BigtableTableAdminRestStub): def __hash__(self): return hash("DeleteBackup") @@ -1696,12 +2020,104 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v2/{name=projects/*/instances/*/tables/*}:dropRowRange", + "uri": "/v2/{name=projects/*/instances/*/tables/*}:dropRowRange", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_drop_row_range(request, metadata) + pb_request = bigtable_table_admin.DropRowRangeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GenerateConsistencyToken(BigtableTableAdminRestStub): + def __hash__(self): + return hash("GenerateConsistencyToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: bigtable_table_admin.GenerateConsistencyTokenRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> bigtable_table_admin.GenerateConsistencyTokenResponse: + r"""Call the generate consistency + token method over HTTP. + + Args: + request (~.bigtable_table_admin.GenerateConsistencyTokenRequest): + The request object. Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.GenerateConsistencyToken][google.bigtable.admin.v2.BigtableTableAdmin.GenerateConsistencyToken] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.bigtable_table_admin.GenerateConsistencyTokenResponse: + Response message for + [google.bigtable.admin.v2.BigtableTableAdmin.GenerateConsistencyToken][google.bigtable.admin.v2.BigtableTableAdmin.GenerateConsistencyToken] + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/instances/*/tables/*}:generateConsistencyToken", "body": "*", }, ] - request, metadata = self._interceptor.pre_drop_row_range(request, metadata) - pb_request = bigtable_table_admin.DropRowRangeRequest.pb(request) + request, metadata = self._interceptor.pre_generate_consistency_token( + request, metadata + ) + pb_request = bigtable_table_admin.GenerateConsistencyTokenRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -1739,9 +2155,17 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _GenerateConsistencyToken(BigtableTableAdminRestStub): + # Return the response + resp = bigtable_table_admin.GenerateConsistencyTokenResponse() + pb_resp = bigtable_table_admin.GenerateConsistencyTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_consistency_token(resp) + return resp + + class _GetAuthorizedView(BigtableTableAdminRestStub): def __hash__(self): - return hash("GenerateConsistencyToken") + return hash("GetAuthorizedView") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1755,52 +2179,47 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: bigtable_table_admin.GenerateConsistencyTokenRequest, + request: bigtable_table_admin.GetAuthorizedViewRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> bigtable_table_admin.GenerateConsistencyTokenResponse: - r"""Call the generate consistency - token method over HTTP. + ) -> table.AuthorizedView: + r"""Call the get authorized view method over HTTP. - Args: - request (~.bigtable_table_admin.GenerateConsistencyTokenRequest): - The request object. Request message for - [google.bigtable.admin.v2.BigtableTableAdmin.GenerateConsistencyToken][google.bigtable.admin.v2.BigtableTableAdmin.GenerateConsistencyToken] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + Args: + request (~.bigtable_table_admin.GetAuthorizedViewRequest): + The request object. Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.GetAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.GetAuthorizedView] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. - Returns: - ~.bigtable_table_admin.GenerateConsistencyTokenResponse: - Response message for - [google.bigtable.admin.v2.BigtableTableAdmin.GenerateConsistencyToken][google.bigtable.admin.v2.BigtableTableAdmin.GenerateConsistencyToken] + Returns: + ~.table.AuthorizedView: + AuthorizedViews represent subsets of + a particular Cloud Bigtable table. 
Users + can configure access to each Authorized + View independently from the table and + use the existing Data APIs to access the + subset of data. """ http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v2/{name=projects/*/instances/*/tables/*}:generateConsistencyToken", - "body": "*", + "method": "get", + "uri": "/v2/{name=projects/*/instances/*/tables/*/authorizedViews/*}", }, ] - request, metadata = self._interceptor.pre_generate_consistency_token( + request, metadata = self._interceptor.pre_get_authorized_view( request, metadata ) - pb_request = bigtable_table_admin.GenerateConsistencyTokenRequest.pb( - request - ) + pb_request = bigtable_table_admin.GetAuthorizedViewRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1823,7 +2242,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1832,11 +2250,11 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = bigtable_table_admin.GenerateConsistencyTokenResponse() - pb_resp = bigtable_table_admin.GenerateConsistencyTokenResponse.pb(resp) + resp = table.AuthorizedView() + pb_resp = table.AuthorizedView.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_generate_consistency_token(resp) + resp = self._interceptor.post_get_authorized_view(resp) return resp class _GetBackup(BigtableTableAdminRestStub): @@ -2293,6 +2711,96 @@ def __call__( resp = self._interceptor.post_get_table(resp) return resp + class _ListAuthorizedViews(BigtableTableAdminRestStub): + def __hash__(self): + return hash("ListAuthorizedViews") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: bigtable_table_admin.ListAuthorizedViewsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> bigtable_table_admin.ListAuthorizedViewsResponse: + r"""Call the list authorized views method over HTTP. + + Args: + request (~.bigtable_table_admin.ListAuthorizedViewsRequest): + The request object. Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews][google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.bigtable_table_admin.ListAuthorizedViewsResponse: + Response message for + [google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews][google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews] + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*/instances/*/tables/*}/authorizedViews", + }, + ] + request, metadata = self._interceptor.pre_list_authorized_views( + request, metadata + ) + pb_request = bigtable_table_admin.ListAuthorizedViewsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = bigtable_table_admin.ListAuthorizedViewsResponse() + pb_resp = bigtable_table_admin.ListAuthorizedViewsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_authorized_views(resp) + return resp + class _ListBackups(BigtableTableAdminRestStub): def __hash__(self): return hash("ListBackups") @@ -3230,6 +3738,102 @@ def __call__( resp = self._interceptor.post_undelete_table(resp) return resp + class _UpdateAuthorizedView(BigtableTableAdminRestStub): + def __hash__(self): + return hash("UpdateAuthorizedView") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: bigtable_table_admin.UpdateAuthorizedViewRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update authorized view method over HTTP. + + Args: + request (~.bigtable_table_admin.UpdateAuthorizedViewRequest): + The request object. The request for + [UpdateAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.UpdateAuthorizedView]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2/{authorized_view.name=projects/*/instances/*/tables/*/authorizedViews/*}", + "body": "authorized_view", + }, + ] + request, metadata = self._interceptor.pre_update_authorized_view( + request, metadata + ) + pb_request = bigtable_table_admin.UpdateAuthorizedViewRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_authorized_view(resp) + return resp + class _UpdateBackup(BigtableTableAdminRestStub): def __hash__(self): return hash("UpdateBackup") @@ -3440,6 +4044,16 @@ def copy_backup( # In C++ this would require a dynamic_cast return self._CopyBackup(self._session, self._host, self._interceptor) # type: ignore + @property + def create_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.CreateAuthorizedViewRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAuthorizedView(self._session, self._host, self._interceptor) # type: ignore + @property def create_backup( self, @@ -3466,6 +4080,14 @@ def create_table_from_snapshot( # In C++ this would require a dynamic_cast return self._CreateTableFromSnapshot(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_authorized_view( + self, + ) -> Callable[[bigtable_table_admin.DeleteAuthorizedViewRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAuthorizedView(self._session, self._host, self._interceptor) # type: ignore + @property def delete_backup( self, @@ -3509,6 +4131,16 @@ def generate_consistency_token( # In C++ this would require a dynamic_cast return self._GenerateConsistencyToken(self._session, self._host, self._interceptor) # type: ignore + @property + def get_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.GetAuthorizedViewRequest], table.AuthorizedView + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetAuthorizedView(self._session, self._host, self._interceptor) # type: ignore + @property def get_backup( self, @@ -3541,6 +4173,17 @@ def get_table( # In C++ this would require a dynamic_cast return self._GetTable(self._session, self._host, self._interceptor) # type: ignore + @property + def list_authorized_views( + self, + ) -> Callable[ + [bigtable_table_admin.ListAuthorizedViewsRequest], + bigtable_table_admin.ListAuthorizedViewsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAuthorizedViews(self._session, self._host, self._interceptor) # type: ignore + @property def list_backups( self, @@ -3629,6 +4272,16 @@ def undelete_table( # In C++ this would require a dynamic_cast return self._UndeleteTable(self._session, self._host, self._interceptor) # type: ignore + @property + def update_authorized_view( + self, + ) -> Callable[ + [bigtable_table_admin.UpdateAuthorizedViewRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAuthorizedView(self._session, self._host, self._interceptor) # type: ignore + @property def update_backup( self, diff --git a/google/cloud/bigtable_admin_v2/types/__init__.py b/google/cloud/bigtable_admin_v2/types/__init__.py index a2fefffc8..3ff9075d2 100644 --- a/google/cloud/bigtable_admin_v2/types/__init__.py +++ b/google/cloud/bigtable_admin_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
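The hunks above wire the new AuthorizedView RPCs (create, get, list, update, delete) into the REST transport and its interceptors. As a rough orientation for how that surface is meant to be used, the sketch below assumes the generated BigtableTableAdminClient exposes matching client methods for the RPCs shown here and that application-default credentials are available; all resource names are placeholders, not values from this change.

# Usage sketch only: assumes the generated client surfaces the AuthorizedView
# RPCs wired into the transports above; resource names are placeholders.
from google.cloud import bigtable_admin_v2
from google.cloud.bigtable_admin_v2 import types

client = bigtable_admin_v2.BigtableTableAdminClient()
table_name = "projects/my-project/instances/my-instance/tables/my-table"

# Restrict the view to rows prefixed with b"user#" and every qualifier in "cf1".
view = types.AuthorizedView(
    subset_view=types.AuthorizedView.SubsetView(
        row_prefixes=[b"user#"],
        family_subsets={
            "cf1": types.AuthorizedView.FamilySubsets(qualifier_prefixes=[b""]),
        },
    ),
    deletion_protection=False,
)

# CreateAuthorizedView returns a long-running operation wrapping the new view.
operation = client.create_authorized_view(
    request=types.CreateAuthorizedViewRequest(
        parent=table_name,
        authorized_view_id="my-view",
        authorized_view=view,
    )
)
created = operation.result()

# GetAuthorizedView returns the stored table.AuthorizedView.
fetched = client.get_authorized_view(
    request=types.GetAuthorizedViewRequest(name=created.name)
)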
@@ -46,20 +46,27 @@ CheckConsistencyResponse, CopyBackupMetadata, CopyBackupRequest, + CreateAuthorizedViewMetadata, + CreateAuthorizedViewRequest, CreateBackupMetadata, CreateBackupRequest, CreateTableFromSnapshotMetadata, CreateTableFromSnapshotRequest, CreateTableRequest, + DataBoostReadLocalWrites, + DeleteAuthorizedViewRequest, DeleteBackupRequest, DeleteSnapshotRequest, DeleteTableRequest, DropRowRangeRequest, GenerateConsistencyTokenRequest, GenerateConsistencyTokenResponse, + GetAuthorizedViewRequest, GetBackupRequest, GetSnapshotRequest, GetTableRequest, + ListAuthorizedViewsRequest, + ListAuthorizedViewsResponse, ListBackupsRequest, ListBackupsResponse, ListSnapshotsRequest, @@ -72,8 +79,11 @@ RestoreTableRequest, SnapshotTableMetadata, SnapshotTableRequest, + StandardReadRemoteWrites, UndeleteTableMetadata, UndeleteTableRequest, + UpdateAuthorizedViewMetadata, + UpdateAuthorizedViewRequest, UpdateBackupRequest, UpdateTableMetadata, UpdateTableRequest, @@ -91,6 +101,7 @@ Instance, ) from .table import ( + AuthorizedView, Backup, BackupInfo, ChangeStreamConfig, @@ -102,6 +113,9 @@ Table, RestoreSourceType, ) +from .types import ( + Type, +) __all__ = ( "CreateAppProfileRequest", @@ -134,20 +148,27 @@ "CheckConsistencyResponse", "CopyBackupMetadata", "CopyBackupRequest", + "CreateAuthorizedViewMetadata", + "CreateAuthorizedViewRequest", "CreateBackupMetadata", "CreateBackupRequest", "CreateTableFromSnapshotMetadata", "CreateTableFromSnapshotRequest", "CreateTableRequest", + "DataBoostReadLocalWrites", + "DeleteAuthorizedViewRequest", "DeleteBackupRequest", "DeleteSnapshotRequest", "DeleteTableRequest", "DropRowRangeRequest", "GenerateConsistencyTokenRequest", "GenerateConsistencyTokenResponse", + "GetAuthorizedViewRequest", "GetBackupRequest", "GetSnapshotRequest", "GetTableRequest", + "ListAuthorizedViewsRequest", + "ListAuthorizedViewsResponse", "ListBackupsRequest", "ListBackupsResponse", "ListSnapshotsRequest", @@ -160,8 +181,11 @@ "RestoreTableRequest", "SnapshotTableMetadata", "SnapshotTableRequest", + "StandardReadRemoteWrites", "UndeleteTableMetadata", "UndeleteTableRequest", + "UpdateAuthorizedViewMetadata", + "UpdateAuthorizedViewRequest", "UpdateBackupRequest", "UpdateTableMetadata", "UpdateTableRequest", @@ -173,6 +197,7 @@ "Cluster", "HotTablet", "Instance", + "AuthorizedView", "Backup", "BackupInfo", "ChangeStreamConfig", @@ -183,4 +208,5 @@ "Snapshot", "Table", "RestoreSourceType", + "Type", ) diff --git a/google/cloud/bigtable_admin_v2/types/bigtable_instance_admin.py b/google/cloud/bigtable_admin_v2/types/bigtable_instance_admin.py index 87332a351..4e5ddfd6e 100644 --- a/google/cloud/bigtable_admin_v2/types/bigtable_instance_admin.py +++ b/google/cloud/bigtable_admin_v2/types/bigtable_instance_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/bigtable_admin_v2/types/bigtable_table_admin.py b/google/cloud/bigtable_admin_v2/types/bigtable_table_admin.py index c21ac4d5a..0bc3b6b81 100644 --- a/google/cloud/bigtable_admin_v2/types/bigtable_table_admin.py +++ b/google/cloud/bigtable_admin_v2/types/bigtable_table_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
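For reference, the re-exports added to the versioned types package above mean the new message names resolve directly from google.cloud.bigtable_admin_v2.types. The snippet below is only an import check under that assumption; it does not contact any API.

# Import sketch: the names added to the manifest above should resolve from the
# versioned types package. The consistency-mode markers are empty messages.
from google.cloud.bigtable_admin_v2.types import (
    AuthorizedView,
    CreateAuthorizedViewRequest,
    DataBoostReadLocalWrites,
    ListAuthorizedViewsRequest,
    StandardReadRemoteWrites,
    Type,
    UpdateAuthorizedViewRequest,
)

standard_mode = StandardReadRemoteWrites()
data_boost_mode = DataBoostReadLocalWrites()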
@@ -47,6 +47,8 @@ "GenerateConsistencyTokenRequest", "GenerateConsistencyTokenResponse", "CheckConsistencyRequest", + "StandardReadRemoteWrites", + "DataBoostReadLocalWrites", "CheckConsistencyResponse", "SnapshotTableRequest", "GetSnapshotRequest", @@ -64,6 +66,14 @@ "ListBackupsResponse", "CopyBackupRequest", "CopyBackupMetadata", + "CreateAuthorizedViewRequest", + "CreateAuthorizedViewMetadata", + "ListAuthorizedViewsRequest", + "ListAuthorizedViewsResponse", + "GetAuthorizedViewRequest", + "UpdateAuthorizedViewRequest", + "UpdateAuthorizedViewMetadata", + "DeleteAuthorizedViewRequest", }, ) @@ -632,6 +642,11 @@ class Modification(proto.Message): given ID, or fail if no such family exists. This field is a member of `oneof`_ ``mod``. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. A mask specifying which fields (e.g. ``gc_rule``) + in the ``update`` mod should be updated, ignored for other + modification types. If unset or empty, we treat it as + updating ``gc_rule`` to be backward compatible. """ id: str = proto.Field( @@ -655,6 +670,11 @@ class Modification(proto.Message): number=4, oneof="mod", ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=6, + message=field_mask_pb2.FieldMask, + ) name: str = proto.Field( proto.STRING, @@ -707,6 +727,13 @@ class CheckConsistencyRequest(proto.Message): r"""Request message for [google.bigtable.admin.v2.BigtableTableAdmin.CheckConsistency][google.bigtable.admin.v2.BigtableTableAdmin.CheckConsistency] + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Required. The unique name of the Table for which to check @@ -715,6 +742,20 @@ class CheckConsistencyRequest(proto.Message): consistency_token (str): Required. The token created using GenerateConsistencyToken for the Table. + standard_read_remote_writes (google.cloud.bigtable_admin_v2.types.StandardReadRemoteWrites): + Checks that reads using an app profile with + ``StandardIsolation`` can see all writes committed before + the token was created, even if the read and write target + different clusters. + + This field is a member of `oneof`_ ``mode``. + data_boost_read_local_writes (google.cloud.bigtable_admin_v2.types.DataBoostReadLocalWrites): + Checks that reads using an app profile with + ``DataBoostIsolationReadOnly`` can see all writes committed + before the token was created, but only if the read and write + target the same cluster. + + This field is a member of `oneof`_ ``mode``. """ name: str = proto.Field( @@ -725,6 +766,32 @@ class CheckConsistencyRequest(proto.Message): proto.STRING, number=2, ) + standard_read_remote_writes: "StandardReadRemoteWrites" = proto.Field( + proto.MESSAGE, + number=3, + oneof="mode", + message="StandardReadRemoteWrites", + ) + data_boost_read_local_writes: "DataBoostReadLocalWrites" = proto.Field( + proto.MESSAGE, + number=4, + oneof="mode", + message="DataBoostReadLocalWrites", + ) + + +class StandardReadRemoteWrites(proto.Message): + r"""Checks that all writes before the consistency token was + generated are replicated in every cluster and readable. 
+ + """ + + +class DataBoostReadLocalWrites(proto.Message): + r"""Checks that all writes before the consistency token was + generated in the same cluster are readable by Databoost. + + """ class CheckConsistencyResponse(proto.Message): @@ -1368,4 +1435,273 @@ class CopyBackupMetadata(proto.Message): ) +class CreateAuthorizedViewRequest(proto.Message): + r"""The request for + [CreateAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.CreateAuthorizedView] + + Attributes: + parent (str): + Required. This is the name of the table the AuthorizedView + belongs to. Values are of the form + ``projects/{project}/instances/{instance}/tables/{table}``. + authorized_view_id (str): + Required. The id of the AuthorizedView to create. This + AuthorizedView must not already exist. The + ``authorized_view_id`` appended to ``parent`` forms the full + AuthorizedView name of the form + ``projects/{project}/instances/{instance}/tables/{table}/authorizedView/{authorized_view}``. + authorized_view (google.cloud.bigtable_admin_v2.types.AuthorizedView): + Required. The AuthorizedView to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + authorized_view_id: str = proto.Field( + proto.STRING, + number=2, + ) + authorized_view: gba_table.AuthorizedView = proto.Field( + proto.MESSAGE, + number=3, + message=gba_table.AuthorizedView, + ) + + +class CreateAuthorizedViewMetadata(proto.Message): + r"""The metadata for the Operation returned by + CreateAuthorizedView. + + Attributes: + original_request (google.cloud.bigtable_admin_v2.types.CreateAuthorizedViewRequest): + The request that prompted the initiation of + this CreateInstance operation. + request_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the original request was + received. + finish_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the operation failed or was + completed successfully. + """ + + original_request: "CreateAuthorizedViewRequest" = proto.Field( + proto.MESSAGE, + number=1, + message="CreateAuthorizedViewRequest", + ) + request_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + finish_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class ListAuthorizedViewsRequest(proto.Message): + r"""Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews][google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews] + + Attributes: + parent (str): + Required. The unique name of the table for which + AuthorizedViews should be listed. Values are of the form + ``projects/{project}/instances/{instance}/tables/{table}``. + page_size (int): + Optional. Maximum number of results per page. + + A page_size of zero lets the server choose the number of + items to return. A page_size which is strictly positive will + return at most that many items. A negative page_size will + cause an error. + + Following the first request, subsequent paginated calls are + not required to pass a page_size. If a page_size is set in + subsequent calls, it must match the page_size given in the + first request. + page_token (str): + Optional. The value of ``next_page_token`` returned by a + previous call. + view (google.cloud.bigtable_admin_v2.types.AuthorizedView.ResponseView): + Optional. The resource_view to be applied to the returned + views' fields. Default to NAME_ONLY. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + view: gba_table.AuthorizedView.ResponseView = proto.Field( + proto.ENUM, + number=4, + enum=gba_table.AuthorizedView.ResponseView, + ) + + +class ListAuthorizedViewsResponse(proto.Message): + r"""Response message for + [google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews][google.bigtable.admin.v2.BigtableTableAdmin.ListAuthorizedViews] + + Attributes: + authorized_views (MutableSequence[google.cloud.bigtable_admin_v2.types.AuthorizedView]): + The AuthorizedViews present in the requested + table. + next_page_token (str): + Set if not all tables could be returned in a single + response. Pass this value to ``page_token`` in another + request to get the next page of results. + """ + + @property + def raw_page(self): + return self + + authorized_views: MutableSequence[gba_table.AuthorizedView] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gba_table.AuthorizedView, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetAuthorizedViewRequest(proto.Message): + r"""Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.GetAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.GetAuthorizedView] + + Attributes: + name (str): + Required. The unique name of the requested AuthorizedView. + Values are of the form + ``projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}``. + view (google.cloud.bigtable_admin_v2.types.AuthorizedView.ResponseView): + Optional. The resource_view to be applied to the returned + AuthorizedView's fields. Default to BASIC. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: gba_table.AuthorizedView.ResponseView = proto.Field( + proto.ENUM, + number=2, + enum=gba_table.AuthorizedView.ResponseView, + ) + + +class UpdateAuthorizedViewRequest(proto.Message): + r"""The request for + [UpdateAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.UpdateAuthorizedView]. + + Attributes: + authorized_view (google.cloud.bigtable_admin_v2.types.AuthorizedView): + Required. The AuthorizedView to update. The ``name`` in + ``authorized_view`` is used to identify the AuthorizedView. + AuthorizedView name must in this format + projects//instances//tables//authorizedViews/ + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. A mask specifying + which fields in the AuthorizedView resource should be + updated. This mask is relative to the AuthorizedView + resource, not to the request message. A field will be + overwritten if it is in the mask. If empty, all fields set + in the request will be overwritten. A special value ``*`` + means to overwrite all fields (including fields not set in + the request). + ignore_warnings (bool): + Optional. If true, ignore the safety checks + when updating the AuthorizedView. 
+ """ + + authorized_view: gba_table.AuthorizedView = proto.Field( + proto.MESSAGE, + number=1, + message=gba_table.AuthorizedView, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + ignore_warnings: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class UpdateAuthorizedViewMetadata(proto.Message): + r"""Metadata for the google.longrunning.Operation returned by + [UpdateAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.UpdateAuthorizedView]. + + Attributes: + original_request (google.cloud.bigtable_admin_v2.types.UpdateAuthorizedViewRequest): + The request that prompted the initiation of + this UpdateAuthorizedView operation. + request_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the original request was + received. + finish_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the operation failed or was + completed successfully. + """ + + original_request: "UpdateAuthorizedViewRequest" = proto.Field( + proto.MESSAGE, + number=1, + message="UpdateAuthorizedViewRequest", + ) + request_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + finish_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class DeleteAuthorizedViewRequest(proto.Message): + r"""Request message for + [google.bigtable.admin.v2.BigtableTableAdmin.DeleteAuthorizedView][google.bigtable.admin.v2.BigtableTableAdmin.DeleteAuthorizedView] + + Attributes: + name (str): + Required. The unique name of the AuthorizedView to be + deleted. Values are of the form + ``projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}``. + etag (str): + Optional. The current etag of the + AuthorizedView. If an etag is provided and does + not match the current etag of the + AuthorizedView, deletion will be blocked and an + ABORTED error will be returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/bigtable_admin_v2/types/common.py b/google/cloud/bigtable_admin_v2/types/common.py index 959b9deb1..1ab52a0e3 100644 --- a/google/cloud/bigtable_admin_v2/types/common.py +++ b/google/cloud/bigtable_admin_v2/types/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/bigtable_admin_v2/types/instance.py b/google/cloud/bigtable_admin_v2/types/instance.py index 78efd711b..f7916d44b 100644 --- a/google/cloud/bigtable_admin_v2/types/instance.py +++ b/google/cloud/bigtable_admin_v2/types/instance.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -431,6 +431,11 @@ class AppProfile(proto.Message): The standard options used for isolating this app profile's traffic from other use cases. + This field is a member of `oneof`_ ``isolation``. 
+ data_boost_isolation_read_only (google.cloud.bigtable_admin_v2.types.AppProfile.DataBoostIsolationReadOnly): + Specifies that this app profile is intended + for read-only usage via the Data Boost feature. + This field is a member of `oneof`_ ``isolation``. """ @@ -517,6 +522,54 @@ class StandardIsolation(proto.Message): enum="AppProfile.Priority", ) + class DataBoostIsolationReadOnly(proto.Message): + r"""Data Boost is a serverless compute capability that lets you + run high-throughput read jobs on your Bigtable data, without + impacting the performance of the clusters that handle your + application traffic. Currently, Data Boost exclusively supports + read-only use-cases with single-cluster routing. + + Data Boost reads are only guaranteed to see the results of + writes that were written at least 30 minutes ago. This means + newly written values may not become visible for up to 30m, and + also means that old values may remain visible for up to 30m + after being deleted or overwritten. To mitigate the staleness of + the data, users may either wait 30m, or use CheckConsistency. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + compute_billing_owner (google.cloud.bigtable_admin_v2.types.AppProfile.DataBoostIsolationReadOnly.ComputeBillingOwner): + The Compute Billing Owner for this Data Boost + App Profile. + + This field is a member of `oneof`_ ``_compute_billing_owner``. + """ + + class ComputeBillingOwner(proto.Enum): + r"""Compute Billing Owner specifies how usage should be accounted + when using Data Boost. Compute Billing Owner also configures + which Cloud Project is charged for relevant quota. + + Values: + COMPUTE_BILLING_OWNER_UNSPECIFIED (0): + Unspecified value. + HOST_PAYS (1): + The host Cloud Project containing the + targeted Bigtable Instance / Table pays for + compute. + """ + COMPUTE_BILLING_OWNER_UNSPECIFIED = 0 + HOST_PAYS = 1 + + compute_billing_owner: "AppProfile.DataBoostIsolationReadOnly.ComputeBillingOwner" = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum="AppProfile.DataBoostIsolationReadOnly.ComputeBillingOwner", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -553,6 +606,12 @@ class StandardIsolation(proto.Message): oneof="isolation", message=StandardIsolation, ) + data_boost_isolation_read_only: DataBoostIsolationReadOnly = proto.Field( + proto.MESSAGE, + number=10, + oneof="isolation", + message=DataBoostIsolationReadOnly, + ) class HotTablet(proto.Message): diff --git a/google/cloud/bigtable_admin_v2/types/table.py b/google/cloud/bigtable_admin_v2/types/table.py index 57bd1b00f..ef162bee1 100644 --- a/google/cloud/bigtable_admin_v2/types/table.py +++ b/google/cloud/bigtable_admin_v2/types/table.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -19,6 +19,7 @@ import proto # type: ignore +from google.cloud.bigtable_admin_v2.types import types from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -31,6 +32,7 @@ "RestoreInfo", "ChangeStreamConfig", "Table", + "AuthorizedView", "ColumnFamily", "GcRule", "EncryptionInfo", @@ -109,6 +111,9 @@ class Table(proto.Message): timestamp. Each table is served using the resources of its parent cluster. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): The unique name of the table. Values are of the form @@ -152,6 +157,12 @@ class Table(proto.Message): Note one can still delete the data stored in the table through Data APIs. + automated_backup_policy (google.cloud.bigtable_admin_v2.types.Table.AutomatedBackupPolicy): + If specified, automated backups are enabled + for this table. Otherwise, automated backups are + disabled. + + This field is a member of `oneof`_ ``automated_backup_config``. """ class TimestampGranularity(proto.Enum): @@ -266,6 +277,31 @@ class ReplicationState(proto.Enum): message="EncryptionInfo", ) + class AutomatedBackupPolicy(proto.Message): + r"""Defines an automated backup policy for a table + + Attributes: + retention_period (google.protobuf.duration_pb2.Duration): + Required. How long the automated backups + should be retained. The only supported value at + this time is 3 days. + frequency (google.protobuf.duration_pb2.Duration): + Required. How frequently automated backups + should occur. The only supported value at this + time is 24 hours. + """ + + retention_period: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + frequency: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -301,6 +337,137 @@ class ReplicationState(proto.Enum): proto.BOOL, number=9, ) + automated_backup_policy: AutomatedBackupPolicy = proto.Field( + proto.MESSAGE, + number=13, + oneof="automated_backup_config", + message=AutomatedBackupPolicy, + ) + + +class AuthorizedView(proto.Message): + r"""AuthorizedViews represent subsets of a particular Cloud + Bigtable table. Users can configure access to each Authorized + View independently from the table and use the existing Data APIs + to access the subset of data. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The name of this AuthorizedView. Values are of + the form + ``projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}`` + subset_view (google.cloud.bigtable_admin_v2.types.AuthorizedView.SubsetView): + An AuthorizedView permitting access to an + explicit subset of a Table. + + This field is a member of `oneof`_ ``authorized_view``. + etag (str): + The etag for this AuthorizedView. + If this is provided on update, it must match the + server's etag. The server returns ABORTED error + on a mismatched etag. + deletion_protection (bool): + Set to true to make the AuthorizedView + protected against deletion. The parent Table and + containing Instance cannot be deleted if an + AuthorizedView has this bit set. + """ + + class ResponseView(proto.Enum): + r"""Defines a subset of an AuthorizedView's fields. + + Values: + RESPONSE_VIEW_UNSPECIFIED (0): + Uses the default view for each method as + documented in the request. + NAME_ONLY (1): + Only populates ``name``. + BASIC (2): + Only populates the AuthorizedView's basic metadata. This + includes: name, deletion_protection, etag. + FULL (3): + Populates every fields. + """ + RESPONSE_VIEW_UNSPECIFIED = 0 + NAME_ONLY = 1 + BASIC = 2 + FULL = 3 + + class FamilySubsets(proto.Message): + r"""Subsets of a column family that are included in this + AuthorizedView. 
+ + Attributes: + qualifiers (MutableSequence[bytes]): + Individual exact column qualifiers to be + included in the AuthorizedView. + qualifier_prefixes (MutableSequence[bytes]): + Prefixes for qualifiers to be included in the + AuthorizedView. Every qualifier starting with + one of these prefixes is included in the + AuthorizedView. To provide access to all + qualifiers, include the empty string as a prefix + (""). + """ + + qualifiers: MutableSequence[bytes] = proto.RepeatedField( + proto.BYTES, + number=1, + ) + qualifier_prefixes: MutableSequence[bytes] = proto.RepeatedField( + proto.BYTES, + number=2, + ) + + class SubsetView(proto.Message): + r"""Defines a simple AuthorizedView that is a subset of the + underlying Table. + + Attributes: + row_prefixes (MutableSequence[bytes]): + Row prefixes to be included in the + AuthorizedView. To provide access to all rows, + include the empty string as a prefix (""). + family_subsets (MutableMapping[str, google.cloud.bigtable_admin_v2.types.AuthorizedView.FamilySubsets]): + Map from column family name to the columns in + this family to be included in the + AuthorizedView. + """ + + row_prefixes: MutableSequence[bytes] = proto.RepeatedField( + proto.BYTES, + number=1, + ) + family_subsets: MutableMapping[ + str, "AuthorizedView.FamilySubsets" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message="AuthorizedView.FamilySubsets", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + subset_view: SubsetView = proto.Field( + proto.MESSAGE, + number=2, + oneof="authorized_view", + message=SubsetView, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + deletion_protection: bool = proto.Field( + proto.BOOL, + number=4, + ) class ColumnFamily(proto.Message): @@ -316,6 +483,20 @@ class ColumnFamily(proto.Message): opportunistically in the background, and so it's possible for reads to return a cell even if it matches the active GC expression for its family. + value_type (google.cloud.bigtable_admin_v2.types.Type): + The type of data stored in each of this family's cell + values, including its full encoding. If omitted, the family + only serves raw untyped bytes. + + For now, only the ``Aggregate`` type is supported. + + ``Aggregate`` can only be set at family creation and is + immutable afterwards. + + If ``value_type`` is ``Aggregate``, written data must be + compatible with: + + - ``value_type.input_type`` for ``AddInput`` mutations """ gc_rule: "GcRule" = proto.Field( @@ -323,6 +504,11 @@ class ColumnFamily(proto.Message): number=1, message="GcRule", ) + value_type: types.Type = proto.Field( + proto.MESSAGE, + number=3, + message=types.Type, + ) class GcRule(proto.Message): diff --git a/google/cloud/bigtable_admin_v2/types/types.py b/google/cloud/bigtable_admin_v2/types/types.py new file mode 100644 index 000000000..d57d1cdf3 --- /dev/null +++ b/google/cloud/bigtable_admin_v2/types/types.py @@ -0,0 +1,267 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.bigtable.admin.v2", + manifest={ + "Type", + }, +) + + +class Type(proto.Message): + r"""``Type`` represents the type of data that is written to, read from, + or stored in Bigtable. It is heavily based on the GoogleSQL standard + to help maintain familiarity and consistency across products and + features. + + For compatibility with Bigtable's existing untyped APIs, each + ``Type`` includes an ``Encoding`` which describes how to convert + to/from the underlying data. This might involve composing a series + of steps into an "encoding chain," for example to convert from INT64 + -> STRING -> raw bytes. In most cases, a "link" in the encoding + chain will be based an on existing GoogleSQL conversion function + like ``CAST``. + + Each link in the encoding chain also defines the following + properties: + + - Natural sort: Does the encoded value sort consistently with the + original typed value? Note that Bigtable will always sort data + based on the raw encoded value, *not* the decoded type. + + - Example: STRING values sort in the same order as their UTF-8 + encodings. + - Counterexample: Encoding INT64 to a fixed-width STRING does + *not* preserve sort order when dealing with negative numbers. + INT64(1) > INT64(-1), but STRING("-00001") > STRING("00001). + - The overall encoding chain sorts naturally if *every* link + does. + + - Self-delimiting: If we concatenate two encoded values, can we + always tell where the first one ends and the second one begins? + + - Example: If we encode INT64s to fixed-width STRINGs, the first + value will always contain exactly N digits, possibly preceded + by a sign. + - Counterexample: If we concatenate two UTF-8 encoded STRINGs, + we have no way to tell where the first one ends. + - The overall encoding chain is self-delimiting if *any* link + is. + + - Compatibility: Which other systems have matching encoding + schemes? For example, does this encoding have a GoogleSQL + equivalent? HBase? Java? + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bytes_type (google.cloud.bigtable_admin_v2.types.Type.Bytes): + Bytes + + This field is a member of `oneof`_ ``kind``. + int64_type (google.cloud.bigtable_admin_v2.types.Type.Int64): + Int64 + + This field is a member of `oneof`_ ``kind``. + aggregate_type (google.cloud.bigtable_admin_v2.types.Type.Aggregate): + Aggregate + + This field is a member of `oneof`_ ``kind``. + """ + + class Bytes(proto.Message): + r"""Bytes Values of type ``Bytes`` are stored in ``Value.bytes_value``. + + Attributes: + encoding (google.cloud.bigtable_admin_v2.types.Type.Bytes.Encoding): + The encoding to use when converting to/from + lower level types. + """ + + class Encoding(proto.Message): + r"""Rules used to convert to/from lower level types. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + raw (google.cloud.bigtable_admin_v2.types.Type.Bytes.Encoding.Raw): + Use ``Raw`` encoding. + + This field is a member of `oneof`_ ``encoding``. 
+ """ + + class Raw(proto.Message): + r"""Leaves the value "as-is" + + - Natural sort? Yes + - Self-delimiting? No + - Compatibility? N/A + + """ + + raw: "Type.Bytes.Encoding.Raw" = proto.Field( + proto.MESSAGE, + number=1, + oneof="encoding", + message="Type.Bytes.Encoding.Raw", + ) + + encoding: "Type.Bytes.Encoding" = proto.Field( + proto.MESSAGE, + number=1, + message="Type.Bytes.Encoding", + ) + + class Int64(proto.Message): + r"""Int64 Values of type ``Int64`` are stored in ``Value.int_value``. + + Attributes: + encoding (google.cloud.bigtable_admin_v2.types.Type.Int64.Encoding): + The encoding to use when converting to/from + lower level types. + """ + + class Encoding(proto.Message): + r"""Rules used to convert to/from lower level types. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + big_endian_bytes (google.cloud.bigtable_admin_v2.types.Type.Int64.Encoding.BigEndianBytes): + Use ``BigEndianBytes`` encoding. + + This field is a member of `oneof`_ ``encoding``. + """ + + class BigEndianBytes(proto.Message): + r"""Encodes the value as an 8-byte big endian twos complement ``Bytes`` + value. + + - Natural sort? No (positive values only) + - Self-delimiting? Yes + - Compatibility? + + - BigQuery Federation ``BINARY`` encoding + - HBase ``Bytes.toBytes`` + - Java ``ByteBuffer.putLong()`` with ``ByteOrder.BIG_ENDIAN`` + + Attributes: + bytes_type (google.cloud.bigtable_admin_v2.types.Type.Bytes): + The underlying ``Bytes`` type, which may be able to encode + further. + """ + + bytes_type: "Type.Bytes" = proto.Field( + proto.MESSAGE, + number=1, + message="Type.Bytes", + ) + + big_endian_bytes: "Type.Int64.Encoding.BigEndianBytes" = proto.Field( + proto.MESSAGE, + number=1, + oneof="encoding", + message="Type.Int64.Encoding.BigEndianBytes", + ) + + encoding: "Type.Int64.Encoding" = proto.Field( + proto.MESSAGE, + number=1, + message="Type.Int64.Encoding", + ) + + class Aggregate(proto.Message): + r"""A value that combines incremental updates into a summarized value. + + Data is never directly written or read using type ``Aggregate``. + Writes will provide either the ``input_type`` or ``state_type``, and + reads will always return the ``state_type`` . + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + input_type (google.cloud.bigtable_admin_v2.types.Type): + Type of the inputs that are accumulated by this + ``Aggregate``, which must specify a full encoding. Use + ``AddInput`` mutations to accumulate new inputs. + state_type (google.cloud.bigtable_admin_v2.types.Type): + Output only. Type that holds the internal accumulator state + for the ``Aggregate``. This is a function of the + ``input_type`` and ``aggregator`` chosen, and will always + specify a full encoding. + sum (google.cloud.bigtable_admin_v2.types.Type.Aggregate.Sum): + Sum aggregator. + + This field is a member of `oneof`_ ``aggregator``. + """ + + class Sum(proto.Message): + r"""Computes the sum of the input values. 
Allowed input: ``Int64`` + State: same as input + + """ + + input_type: "Type" = proto.Field( + proto.MESSAGE, + number=1, + message="Type", + ) + state_type: "Type" = proto.Field( + proto.MESSAGE, + number=2, + message="Type", + ) + sum: "Type.Aggregate.Sum" = proto.Field( + proto.MESSAGE, + number=4, + oneof="aggregator", + message="Type.Aggregate.Sum", + ) + + bytes_type: Bytes = proto.Field( + proto.MESSAGE, + number=1, + oneof="kind", + message=Bytes, + ) + int64_type: Int64 = proto.Field( + proto.MESSAGE, + number=5, + oneof="kind", + message=Int64, + ) + aggregate_type: Aggregate = proto.Field( + proto.MESSAGE, + number=6, + oneof="kind", + message=Aggregate, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/bigtable_v2/__init__.py b/google/cloud/bigtable_v2/__init__.py index 80bd4ec09..56748d882 100644 --- a/google/cloud/bigtable_v2/__init__.py +++ b/google/cloud/bigtable_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -54,6 +54,7 @@ from .types.data import StreamContinuationTokens from .types.data import StreamPartition from .types.data import TimestampRange +from .types.data import Value from .types.data import ValueRange from .types.feature_flags import FeatureFlags from .types.request_stats import FullReadStatsView @@ -104,5 +105,6 @@ "StreamContinuationTokens", "StreamPartition", "TimestampRange", + "Value", "ValueRange", ) diff --git a/google/cloud/bigtable_v2/services/__init__.py b/google/cloud/bigtable_v2/services/__init__.py index 89a37dc92..8f6cf0682 100644 --- a/google/cloud/bigtable_v2/services/__init__.py +++ b/google/cloud/bigtable_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/bigtable_v2/services/bigtable/__init__.py b/google/cloud/bigtable_v2/services/bigtable/__init__.py index f10a68e5b..191b24851 100644 --- a/google/cloud/bigtable_v2/services/bigtable/__init__.py +++ b/google/cloud/bigtable_v2/services/bigtable/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/bigtable_v2/services/bigtable/async_client.py b/google/cloud/bigtable_v2/services/bigtable/async_client.py index 0421e19bc..70daa63e3 100644 --- a/google/cloud/bigtable_v2/services/bigtable/async_client.py +++ b/google/cloud/bigtable_v2/services/bigtable/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,12 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
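A minimal sketch (not part of this diff) of how the new ``Type`` hierarchy composes into an aggregate column family; the ``types`` re-exports used below are assumptions:

    # Sketch only: an Int64 input encoded as big-endian bytes, accumulated
    # with the Sum aggregator via ColumnFamily.value_type.
    from google.cloud.bigtable_admin_v2 import types

    int64_big_endian = types.Type(
        int64_type=types.Type.Int64(
            encoding=types.Type.Int64.Encoding(
                big_endian_bytes=types.Type.Int64.Encoding.BigEndianBytes(
                    bytes_type=types.Type.Bytes(
                        encoding=types.Type.Bytes.Encoding(
                            raw=types.Type.Bytes.Encoding.Raw()
                        )
                    )
                )
            )
        )
    )

    # state_type is output only, so only the input type and aggregator are set.
    family = types.ColumnFamily(
        value_type=types.Type(
            aggregate_type=types.Type.Aggregate(
                input_type=int64_big_endian,
                sum=types.Type.Aggregate.Sum(),
            )
        )
    )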
# -import functools from collections import OrderedDict import functools import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -67,6 +67,8 @@ class BigtableAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = BigtableClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = BigtableClient._DEFAULT_UNIVERSE + authorized_view_path = staticmethod(BigtableClient.authorized_view_path) + parse_authorized_view_path = staticmethod(BigtableClient.parse_authorized_view_path) instance_path = staticmethod(BigtableClient.instance_path) parse_instance_path = staticmethod(BigtableClient.parse_instance_path) table_path = staticmethod(BigtableClient.table_path) @@ -193,7 +195,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, BigtableTransport] = "grpc_asyncio", + transport: Optional[ + Union[str, BigtableTransport, Callable[..., BigtableTransport]] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -205,9 +209,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.BigtableTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,BigtableTransport,Callable[..., BigtableTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BigtableTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -273,8 +279,10 @@ def read_rows( The request object. Request message for Bigtable.ReadRows. table_name (:class:`str`): - Required. The unique name of the table from which to - read. Values are of the form + Optional. The unique name of the table from which to + read. + + Values are of the form ``projects//instances//tables/
``. This corresponds to the ``table_name`` field @@ -302,8 +310,8 @@ def read_rows( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -311,6 +319,8 @@ def read_rows( "the individual field arguments should be set." ) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.ReadRowsRequest): request = bigtable.ReadRowsRequest(request) @@ -326,6 +336,7 @@ def read_rows( rpc = self._client._transport._wrapped_methods[ self._client._transport.read_rows ] + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( @@ -369,8 +380,10 @@ def sample_row_keys( The request object. Request message for Bigtable.SampleRowKeys. table_name (:class:`str`): - Required. The unique name of the table from which to - sample row keys. Values are of the form + Optional. The unique name of the table from which to + sample row keys. + + Values are of the form ``projects//instances//tables/
``. This corresponds to the ``table_name`` field @@ -398,8 +411,8 @@ def sample_row_keys( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -407,6 +420,8 @@ def sample_row_keys( "the individual field arguments should be set." ) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.SampleRowKeysRequest): request = bigtable.SampleRowKeysRequest(request) @@ -422,6 +437,7 @@ def sample_row_keys( rpc = self._client._transport._wrapped_methods[ self._client._transport.sample_row_keys ] + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( @@ -464,8 +480,10 @@ async def mutate_row( The request object. Request message for Bigtable.MutateRow. table_name (:class:`str`): - Required. The unique name of the table to which the - mutation should be applied. Values are of the form + Optional. The unique name of the table to which the + mutation should be applied. + + Values are of the form ``projects//instances//tables/
``. This corresponds to the ``table_name`` field @@ -511,8 +529,8 @@ async def mutate_row( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, row_key, mutations, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -520,6 +538,8 @@ async def mutate_row( "the individual field arguments should be set." ) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.MutateRowRequest): request = bigtable.MutateRowRequest(request) @@ -582,9 +602,11 @@ def mutate_rows( The request object. Request message for BigtableService.MutateRows. table_name (:class:`str`): - Required. The unique name of the - table to which the mutations should be - applied. + Optional. The unique name of the table to which the + mutations should be applied. + + Values are of the form + ``projects//instances//tables/
``. This corresponds to the ``table_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -625,8 +647,8 @@ def mutate_rows( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, entries, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -634,6 +656,8 @@ def mutate_rows( "the individual field arguments should be set." ) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.MutateRowsRequest): request = bigtable.MutateRowsRequest(request) @@ -696,9 +720,10 @@ async def check_and_mutate_row( The request object. Request message for Bigtable.CheckAndMutateRow. table_name (:class:`str`): - Required. The unique name of the table to which the - conditional mutation should be applied. Values are of - the form + Optional. The unique name of the table to which the + conditional mutation should be applied. + + Values are of the form ``projects//instances//tables/
``. This corresponds to the ``table_name`` field @@ -766,8 +791,8 @@ async def check_and_mutate_row( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ table_name, @@ -784,6 +809,8 @@ async def check_and_mutate_row( "the individual field arguments should be set." ) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.CheckAndMutateRowRequest): request = bigtable.CheckAndMutateRowRequest(request) @@ -879,8 +906,8 @@ async def ping_and_warm( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -888,6 +915,8 @@ async def ping_and_warm( "the individual field arguments should be set." ) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.PingAndWarmRequest): request = bigtable.PingAndWarmRequest(request) @@ -949,9 +978,10 @@ async def read_modify_write_row( The request object. Request message for Bigtable.ReadModifyWriteRow. table_name (:class:`str`): - Required. The unique name of the table to which the - read/modify/write rules should be applied. Values are of - the form + Optional. The unique name of the table to which the + read/modify/write rules should be applied. + + Values are of the form ``projects//instances//tables/
``. This corresponds to the ``table_name`` field @@ -998,8 +1028,8 @@ async def read_modify_write_row( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, row_key, rules, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1007,6 +1037,8 @@ async def read_modify_write_row( "the individual field arguments should be set." ) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.ReadModifyWriteRowRequest): request = bigtable.ReadModifyWriteRowRequest(request) @@ -1108,8 +1140,8 @@ def generate_initial_change_stream_partitions( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1117,6 +1149,8 @@ def generate_initial_change_stream_partitions( "the individual field arguments should be set." ) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, bigtable.GenerateInitialChangeStreamPartitionsRequest ): @@ -1131,11 +1165,9 @@ def generate_initial_change_stream_partitions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.generate_initial_change_stream_partitions, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_initial_change_stream_partitions + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1212,8 +1244,8 @@ def read_change_stream( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1221,6 +1253,8 @@ def read_change_stream( "the individual field arguments should be set." ) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.ReadChangeStreamRequest): request = bigtable.ReadChangeStreamRequest(request) @@ -1233,11 +1267,9 @@ def read_change_stream( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.read_change_stream, - default_timeout=43200.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.read_change_stream + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/google/cloud/bigtable_v2/services/bigtable/client.py b/google/cloud/bigtable_v2/services/bigtable/client.py index f53f25e90..7eda705b9 100644 --- a/google/cloud/bigtable_v2/services/bigtable/client.py +++ b/google/cloud/bigtable_v2/services/bigtable/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -185,6 +186,30 @@ def transport(self) -> BigtableTransport: """ return self._transport + @staticmethod + def authorized_view_path( + project: str, + instance: str, + table: str, + authorized_view: str, + ) -> str: + """Returns a fully-qualified authorized_view string.""" + return "projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}".format( + project=project, + instance=instance, + table=table, + authorized_view=authorized_view, + ) + + @staticmethod + def parse_authorized_view_path(path: str) -> Dict[str, str]: + """Parses a authorized_view path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/instances/(?P.+?)/tables/(?P
.+?)/authorizedViews/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def instance_path( project: str, @@ -549,7 +574,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BigtableTransport]] = None, + transport: Optional[ + Union[str, BigtableTransport, Callable[..., BigtableTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -561,9 +588,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, BigtableTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,BigtableTransport,Callable[..., BigtableTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BigtableTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -669,8 +698,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[BigtableTransport], Callable[..., BigtableTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BigtableTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -704,8 +740,10 @@ def read_rows( The request object. Request message for Bigtable.ReadRows. table_name (str): - Required. The unique name of the table from which to - read. Values are of the form + Optional. The unique name of the table from which to + read. + + Values are of the form ``projects//instances//tables/
``. This corresponds to the ``table_name`` field @@ -733,8 +771,8 @@ def read_rows( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -742,10 +780,8 @@ def read_rows( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable.ReadRowsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.ReadRowsRequest): request = bigtable.ReadRowsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -771,6 +807,15 @@ def read_rows( if request.app_profile_id: header_params["app_profile_id"] = request.app_profile_id + routing_param_regex = re.compile( + "^(?Pprojects/[^/]+/instances/[^/]+/tables/[^/]+/authorizedViews/[^/]+)$" + ) + regex_match = routing_param_regex.match(request.authorized_view_name) + if regex_match and regex_match.group("authorized_view_name"): + header_params["authorized_view_name"] = regex_match.group( + "authorized_view_name" + ) + if header_params: metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(header_params), @@ -811,8 +856,10 @@ def sample_row_keys( The request object. Request message for Bigtable.SampleRowKeys. table_name (str): - Required. The unique name of the table from which to - sample row keys. Values are of the form + Optional. The unique name of the table from which to + sample row keys. + + Values are of the form ``projects//instances//tables/
``. This corresponds to the ``table_name`` field @@ -840,8 +887,8 @@ def sample_row_keys( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -849,10 +896,8 @@ def sample_row_keys( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable.SampleRowKeysRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.SampleRowKeysRequest): request = bigtable.SampleRowKeysRequest(request) # If we have keyword arguments corresponding to fields on the @@ -878,6 +923,15 @@ def sample_row_keys( if request.app_profile_id: header_params["app_profile_id"] = request.app_profile_id + routing_param_regex = re.compile( + "^(?Pprojects/[^/]+/instances/[^/]+/tables/[^/]+/authorizedViews/[^/]+)$" + ) + regex_match = routing_param_regex.match(request.authorized_view_name) + if regex_match and regex_match.group("authorized_view_name"): + header_params["authorized_view_name"] = regex_match.group( + "authorized_view_name" + ) + if header_params: metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(header_params), @@ -917,8 +971,10 @@ def mutate_row( The request object. Request message for Bigtable.MutateRow. table_name (str): - Required. The unique name of the table to which the - mutation should be applied. Values are of the form + Optional. The unique name of the table to which the + mutation should be applied. + + Values are of the form ``projects//instances//tables/
``. This corresponds to the ``table_name`` field @@ -964,8 +1020,8 @@ def mutate_row( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, row_key, mutations, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -973,10 +1029,8 @@ def mutate_row( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable.MutateRowRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.MutateRowRequest): request = bigtable.MutateRowRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1006,6 +1060,15 @@ def mutate_row( if request.app_profile_id: header_params["app_profile_id"] = request.app_profile_id + routing_param_regex = re.compile( + "^(?Pprojects/[^/]+/instances/[^/]+/tables/[^/]+/authorizedViews/[^/]+)$" + ) + regex_match = routing_param_regex.match(request.authorized_view_name) + if regex_match and regex_match.group("authorized_view_name"): + header_params["authorized_view_name"] = regex_match.group( + "authorized_view_name" + ) + if header_params: metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(header_params), @@ -1045,9 +1108,11 @@ def mutate_rows( The request object. Request message for BigtableService.MutateRows. table_name (str): - Required. The unique name of the - table to which the mutations should be - applied. + Optional. The unique name of the table to which the + mutations should be applied. + + Values are of the form + ``projects//instances//tables/
``. This corresponds to the ``table_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1088,8 +1153,8 @@ def mutate_rows( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, entries, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1097,10 +1162,8 @@ def mutate_rows( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable.MutateRowsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.MutateRowsRequest): request = bigtable.MutateRowsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1128,6 +1191,15 @@ def mutate_rows( if request.app_profile_id: header_params["app_profile_id"] = request.app_profile_id + routing_param_regex = re.compile( + "^(?Pprojects/[^/]+/instances/[^/]+/tables/[^/]+/authorizedViews/[^/]+)$" + ) + regex_match = routing_param_regex.match(request.authorized_view_name) + if regex_match and regex_match.group("authorized_view_name"): + header_params["authorized_view_name"] = regex_match.group( + "authorized_view_name" + ) + if header_params: metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(header_params), @@ -1169,9 +1241,10 @@ def check_and_mutate_row( The request object. Request message for Bigtable.CheckAndMutateRow. table_name (str): - Required. The unique name of the table to which the - conditional mutation should be applied. Values are of - the form + Optional. The unique name of the table to which the + conditional mutation should be applied. + + Values are of the form ``projects//instances//tables/
``. This corresponds to the ``table_name`` field @@ -1239,8 +1312,8 @@ def check_and_mutate_row( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ table_name, @@ -1257,10 +1330,8 @@ def check_and_mutate_row( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable.CheckAndMutateRowRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.CheckAndMutateRowRequest): request = bigtable.CheckAndMutateRowRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1294,6 +1365,15 @@ def check_and_mutate_row( if request.app_profile_id: header_params["app_profile_id"] = request.app_profile_id + routing_param_regex = re.compile( + "^(?Pprojects/[^/]+/instances/[^/]+/tables/[^/]+/authorizedViews/[^/]+)$" + ) + regex_match = routing_param_regex.match(request.authorized_view_name) + if regex_match and regex_match.group("authorized_view_name"): + header_params["authorized_view_name"] = regex_match.group( + "authorized_view_name" + ) + if header_params: metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(header_params), @@ -1362,8 +1442,8 @@ def ping_and_warm( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1371,10 +1451,8 @@ def ping_and_warm( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable.PingAndWarmRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.PingAndWarmRequest): request = bigtable.PingAndWarmRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1442,9 +1520,10 @@ def read_modify_write_row( The request object. Request message for Bigtable.ReadModifyWriteRow. table_name (str): - Required. The unique name of the table to which the - read/modify/write rules should be applied. Values are of - the form + Optional. The unique name of the table to which the + read/modify/write rules should be applied. + + Values are of the form ``projects//instances//tables/
``. This corresponds to the ``table_name`` field @@ -1491,8 +1570,8 @@ def read_modify_write_row( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, row_key, rules, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1500,10 +1579,8 @@ def read_modify_write_row( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable.ReadModifyWriteRowRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.ReadModifyWriteRowRequest): request = bigtable.ReadModifyWriteRowRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1533,6 +1610,15 @@ def read_modify_write_row( if request.app_profile_id: header_params["app_profile_id"] = request.app_profile_id + routing_param_regex = re.compile( + "^(?Pprojects/[^/]+/instances/[^/]+/tables/[^/]+/authorizedViews/[^/]+)$" + ) + regex_match = routing_param_regex.match(request.authorized_view_name) + if regex_match and regex_match.group("authorized_view_name"): + header_params["authorized_view_name"] = regex_match.group( + "authorized_view_name" + ) + if header_params: metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(header_params), @@ -1609,8 +1695,8 @@ def generate_initial_change_stream_partitions( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1618,10 +1704,8 @@ def generate_initial_change_stream_partitions( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable.GenerateInitialChangeStreamPartitionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, bigtable.GenerateInitialChangeStreamPartitionsRequest ): @@ -1714,8 +1798,8 @@ def read_change_stream( """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([table_name, app_profile_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1723,10 +1807,8 @@ def read_change_stream( "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a bigtable.ReadChangeStreamRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, bigtable.ReadChangeStreamRequest): request = bigtable.ReadChangeStreamRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/google/cloud/bigtable_v2/services/bigtable/transports/__init__.py b/google/cloud/bigtable_v2/services/bigtable/transports/__init__.py index 6a9eb0e58..ae5c1cf72 100644 --- a/google/cloud/bigtable_v2/services/bigtable/transports/__init__.py +++ b/google/cloud/bigtable_v2/services/bigtable/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/bigtable_v2/services/bigtable/transports/base.py b/google/cloud/bigtable_v2/services/bigtable/transports/base.py index 7d1475eb9..d93379723 100644 --- a/google/cloud/bigtable_v2/services/bigtable/transports/base.py +++ b/google/cloud/bigtable_v2/services/bigtable/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py b/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py index bec9c85f1..2a1a9a284 100644 --- a/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py +++ b/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -51,7 +51,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -71,14 +71,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. 
If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -88,11 +91,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -118,7 +121,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. @@ -159,7 +162,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py b/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py index 7765ecce8..2d04f79af 100644 --- a/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py +++ b/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -19,7 +19,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -68,7 +68,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
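A minimal sketch (not part of this diff) of the new ``Callable`` support: the client now accepts a transport factory, and the gRPC transports accept a channel factory. The transport import path used here is an assumption:

    # Sketch only: the factory is called with the same keyword arguments the
    # client would pass to the BigtableTransport constructor, so it can adjust
    # or inspect them before the transport (and its channel) is built.
    from google.cloud.bigtable_v2 import BigtableClient
    from google.cloud.bigtable_v2.services.bigtable.transports import (
        BigtableGrpcTransport,
    )

    def transport_factory(**kwargs):
        # A custom ``channel`` callable could also be injected here, mirroring
        # the grpc/grpc_asyncio transport changes above.
        return BigtableGrpcTransport(**kwargs)

    client = BigtableClient(transport=transport_factory)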
@@ -98,7 +97,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -118,15 +117,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -136,11 +138,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -166,7 +168,7 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. @@ -206,7 +208,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -515,7 +519,7 @@ def read_change_stream( return self._stubs["read_change_stream"] def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
+ """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.read_rows: gapic_v1.method_async.wrap_method( self.read_rows, @@ -529,7 +533,7 @@ def _prep_wrapped_messages(self, client_info): ), self.mutate_row: gapic_v1.method_async.wrap_method( self.mutate_row, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.01, maximum=60.0, multiplier=2, diff --git a/google/cloud/bigtable_v2/services/bigtable/transports/rest.py b/google/cloud/bigtable_v2/services/bigtable/transports/rest.py index d77291a65..a4d8e0ce9 100644 --- a/google/cloud/bigtable_v2/services/bigtable/transports/rest.py +++ b/google/cloud/bigtable_v2/services/bigtable/transports/rest.py @@ -490,6 +490,11 @@ def __call__( "uri": "/v2/{table_name=projects/*/instances/*/tables/*}:checkAndMutateRow", "body": "*", }, + { + "method": "post", + "uri": "/v2/{authorized_view_name=projects/*/instances/*/tables/*/authorizedViews/*}:checkAndMutateRow", + "body": "*", + }, ] request, metadata = self._interceptor.pre_check_and_mutate_row( request, metadata @@ -695,6 +700,11 @@ def __call__( "uri": "/v2/{table_name=projects/*/instances/*/tables/*}:mutateRow", "body": "*", }, + { + "method": "post", + "uri": "/v2/{authorized_view_name=projects/*/instances/*/tables/*/authorizedViews/*}:mutateRow", + "body": "*", + }, ] request, metadata = self._interceptor.pre_mutate_row(request, metadata) pb_request = bigtable.MutateRowRequest.pb(request) @@ -790,6 +800,11 @@ def __call__( "uri": "/v2/{table_name=projects/*/instances/*/tables/*}:mutateRows", "body": "*", }, + { + "method": "post", + "uri": "/v2/{authorized_view_name=projects/*/instances/*/tables/*/authorizedViews/*}:mutateRows", + "body": "*", + }, ] request, metadata = self._interceptor.pre_mutate_rows(request, metadata) pb_request = bigtable.MutateRowsRequest.pb(request) @@ -1078,6 +1093,11 @@ def __call__( "uri": "/v2/{table_name=projects/*/instances/*/tables/*}:readModifyWriteRow", "body": "*", }, + { + "method": "post", + "uri": "/v2/{authorized_view_name=projects/*/instances/*/tables/*/authorizedViews/*}:readModifyWriteRow", + "body": "*", + }, ] request, metadata = self._interceptor.pre_read_modify_write_row( request, metadata @@ -1132,16 +1152,6 @@ class _ReadRows(BigtableRestStub): def __hash__(self): return hash("ReadRows") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - def __call__( self, request: bigtable.ReadRowsRequest, @@ -1175,6 +1185,11 @@ def __call__( "uri": "/v2/{table_name=projects/*/instances/*/tables/*}:readRows", "body": "*", }, + { + "method": "post", + "uri": "/v2/{authorized_view_name=projects/*/instances/*/tables/*/authorizedViews/*}:readRows", + "body": "*", + }, ] request, metadata = self._interceptor.pre_read_rows(request, metadata) pb_request = bigtable.ReadRowsRequest.pb(request) @@ -1195,7 +1210,6 @@ def __call__( use_integers_for_enums=True, ) ) - query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" @@ -1224,16 +1238,6 @@ class _SampleRowKeys(BigtableRestStub): def __hash__(self): return hash("SampleRowKeys") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in 
message_dict - } - def __call__( self, request: bigtable.SampleRowKeysRequest, @@ -1266,6 +1270,10 @@ def __call__( "method": "get", "uri": "/v2/{table_name=projects/*/instances/*/tables/*}:sampleRowKeys", }, + { + "method": "get", + "uri": "/v2/{authorized_view_name=projects/*/instances/*/tables/*/authorizedViews/*}:sampleRowKeys", + }, ] request, metadata = self._interceptor.pre_sample_row_keys(request, metadata) pb_request = bigtable.SampleRowKeysRequest.pb(request) @@ -1281,7 +1289,6 @@ def __call__( use_integers_for_enums=True, ) ) - query_params.update(self._get_unset_required_fields(query_params)) query_params["$alt"] = "json;enum-encoding=int" diff --git a/google/cloud/bigtable_v2/types/__init__.py b/google/cloud/bigtable_v2/types/__init__.py index f266becb9..a7961a910 100644 --- a/google/cloud/bigtable_v2/types/__init__.py +++ b/google/cloud/bigtable_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -49,6 +49,7 @@ StreamContinuationTokens, StreamPartition, TimestampRange, + Value, ValueRange, ) from .feature_flags import ( @@ -98,6 +99,7 @@ "StreamContinuationTokens", "StreamPartition", "TimestampRange", + "Value", "ValueRange", "FeatureFlags", "FullReadStatsView", diff --git a/google/cloud/bigtable_v2/types/bigtable.py b/google/cloud/bigtable_v2/types/bigtable.py index 57f806408..fa6c566a2 100644 --- a/google/cloud/bigtable_v2/types/bigtable.py +++ b/google/cloud/bigtable_v2/types/bigtable.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -58,9 +58,16 @@ class ReadRowsRequest(proto.Message): Attributes: table_name (str): - Required. The unique name of the table from which to read. + Optional. The unique name of the table from which to read. + Values are of the form ``projects//instances//tables/
``. + authorized_view_name (str): + Optional. The unique name of the AuthorizedView from which + to read. + + Values are of the form + ``projects//instances//tables/
/authorizedViews/``. app_profile_id (str): This value specifies routing for replication. If not specified, the "default" application @@ -123,6 +130,10 @@ class RequestStatsView(proto.Enum): proto.STRING, number=1, ) + authorized_view_name: str = proto.Field( + proto.STRING, + number=9, + ) app_profile_id: str = proto.Field( proto.STRING, number=5, @@ -327,9 +338,17 @@ class SampleRowKeysRequest(proto.Message): Attributes: table_name (str): - Required. The unique name of the table from which to sample - row keys. Values are of the form + Optional. The unique name of the table from which to sample + row keys. + + Values are of the form ``projects//instances//tables/
``. + authorized_view_name (str): + Optional. The unique name of the AuthorizedView from which + to sample row keys. + + Values are of the form + ``projects//instances//tables/
/authorizedViews/``. app_profile_id (str): This value specifies routing for replication. If not specified, the "default" application @@ -340,6 +359,10 @@ class SampleRowKeysRequest(proto.Message): proto.STRING, number=1, ) + authorized_view_name: str = proto.Field( + proto.STRING, + number=4, + ) app_profile_id: str = proto.Field( proto.STRING, number=2, @@ -385,9 +408,17 @@ class MutateRowRequest(proto.Message): Attributes: table_name (str): - Required. The unique name of the table to which the mutation - should be applied. Values are of the form + Optional. The unique name of the table to which the mutation + should be applied. + + Values are of the form ``projects//instances//tables/
``. + authorized_view_name (str): + Optional. The unique name of the AuthorizedView to which the + mutation should be applied. + + Values are of the form + ``projects//instances//tables/
/authorizedViews/``. app_profile_id (str): This value specifies routing for replication. If not specified, the "default" application @@ -407,6 +438,10 @@ class MutateRowRequest(proto.Message): proto.STRING, number=1, ) + authorized_view_name: str = proto.Field( + proto.STRING, + number=6, + ) app_profile_id: str = proto.Field( proto.STRING, number=4, @@ -431,8 +466,17 @@ class MutateRowsRequest(proto.Message): Attributes: table_name (str): - Required. The unique name of the table to - which the mutations should be applied. + Optional. The unique name of the table to which the + mutations should be applied. + + Values are of the form + ``projects//instances//tables/
``. + authorized_view_name (str): + Optional. The unique name of the AuthorizedView to which the + mutations should be applied. + + Values are of the form + ``projects//instances//tables/
/authorizedViews/``. app_profile_id (str): This value specifies routing for replication. If not specified, the "default" application @@ -476,6 +520,10 @@ class Entry(proto.Message): proto.STRING, number=1, ) + authorized_view_name: str = proto.Field( + proto.STRING, + number=5, + ) app_profile_id: str = proto.Field( proto.STRING, number=3, @@ -587,10 +635,17 @@ class CheckAndMutateRowRequest(proto.Message): Attributes: table_name (str): - Required. The unique name of the table to which the - conditional mutation should be applied. Values are of the - form + Optional. The unique name of the table to which the + conditional mutation should be applied. + + Values are of the form ``projects//instances//tables/
``. + authorized_view_name (str): + Optional. The unique name of the AuthorizedView to which the + conditional mutation should be applied. + + Values are of the form + ``projects//instances//tables/
/authorizedViews/``. app_profile_id (str): This value specifies routing for replication. If not specified, the "default" application @@ -624,6 +679,10 @@ class CheckAndMutateRowRequest(proto.Message): proto.STRING, number=1, ) + authorized_view_name: str = proto.Field( + proto.STRING, + number=9, + ) app_profile_id: str = proto.Field( proto.STRING, number=7, @@ -700,10 +759,17 @@ class ReadModifyWriteRowRequest(proto.Message): Attributes: table_name (str): - Required. The unique name of the table to which the - read/modify/write rules should be applied. Values are of the - form + Optional. The unique name of the table to which the + read/modify/write rules should be applied. + + Values are of the form ``projects//instances//tables/
``. + authorized_view_name (str): + Optional. The unique name of the AuthorizedView to which the + read/modify/write rules should be applied. + + Values are of the form + ``projects//instances//tables/
/authorizedViews/``. app_profile_id (str): This value specifies routing for replication. If not specified, the "default" application @@ -723,6 +789,10 @@ class ReadModifyWriteRowRequest(proto.Message): proto.STRING, number=1, ) + authorized_view_name: str = proto.Field( + proto.STRING, + number=6, + ) app_profile_id: str = proto.Field( proto.STRING, number=4, diff --git a/google/cloud/bigtable_v2/types/data.py b/google/cloud/bigtable_v2/types/data.py index e37644a76..b2b853c64 100644 --- a/google/cloud/bigtable_v2/types/data.py +++ b/google/cloud/bigtable_v2/types/data.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ "Family", "Column", "Cell", + "Value", "RowRange", "RowSet", "ColumnRange", @@ -164,6 +165,54 @@ class Cell(proto.Message): ) +class Value(proto.Message): + r"""``Value`` represents a dynamically typed value. The typed fields in + ``Value`` are used as a transport encoding for the actual value + (which may be of a more complex type). See the documentation of the + ``Type`` message for more details. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + raw_value (bytes): + Represents a raw byte sequence with no type information. The + ``type`` field must be omitted. + + This field is a member of `oneof`_ ``kind``. + raw_timestamp_micros (int): + Represents a raw cell timestamp with no type information. + The ``type`` field must be omitted. + + This field is a member of `oneof`_ ``kind``. + int_value (int): + Represents a typed value transported as an integer. Default + type for writes: ``Int64`` + + This field is a member of `oneof`_ ``kind``. + """ + + raw_value: bytes = proto.Field( + proto.BYTES, + number=8, + oneof="kind", + ) + raw_timestamp_micros: int = proto.Field( + proto.INT64, + number=9, + oneof="kind", + ) + int_value: int = proto.Field( + proto.INT64, + number=6, + oneof="kind", + ) + + class RowRange(proto.Message): r"""Specifies a contiguous range of rows. @@ -853,6 +902,10 @@ class Mutation(proto.Message): set_cell (google.cloud.bigtable_v2.types.Mutation.SetCell): Set a cell's value. + This field is a member of `oneof`_ ``mutation``. + add_to_cell (google.cloud.bigtable_v2.types.Mutation.AddToCell): + Incrementally updates an ``Aggregate`` cell. + This field is a member of `oneof`_ ``mutation``. delete_from_column (google.cloud.bigtable_v2.types.Mutation.DeleteFromColumn): Deletes cells from a column. @@ -909,6 +962,48 @@ class SetCell(proto.Message): number=4, ) + class AddToCell(proto.Message): + r"""A Mutation which incrementally updates a cell in an ``Aggregate`` + family. + + Attributes: + family_name (str): + The name of the ``Aggregate`` family into which new data + should be added. This must be a family with a ``value_type`` + of ``Aggregate``. Format: ``[-_.a-zA-Z0-9]+`` + column_qualifier (google.cloud.bigtable_v2.types.Value): + The qualifier of the column into which new data should be + added. This must be a ``raw_value``. + timestamp (google.cloud.bigtable_v2.types.Value): + The timestamp of the cell to which new data should be added. 
+ This must be a ``raw_timestamp_micros`` that matches the + table's ``granularity``. + input (google.cloud.bigtable_v2.types.Value): + The input value to be accumulated into the specified cell. + This must be compatible with the family's + ``value_type.input_type``. + """ + + family_name: str = proto.Field( + proto.STRING, + number=1, + ) + column_qualifier: "Value" = proto.Field( + proto.MESSAGE, + number=2, + message="Value", + ) + timestamp: "Value" = proto.Field( + proto.MESSAGE, + number=3, + message="Value", + ) + input: "Value" = proto.Field( + proto.MESSAGE, + number=4, + message="Value", + ) + class DeleteFromColumn(proto.Message): r"""A Mutation which deletes cells from the specified column, optionally restricting the deletions to a given timestamp range. @@ -964,6 +1059,12 @@ class DeleteFromRow(proto.Message): oneof="mutation", message=SetCell, ) + add_to_cell: AddToCell = proto.Field( + proto.MESSAGE, + number=5, + oneof="mutation", + message=AddToCell, + ) delete_from_column: DeleteFromColumn = proto.Field( proto.MESSAGE, number=2, diff --git a/google/cloud/bigtable_v2/types/feature_flags.py b/google/cloud/bigtable_v2/types/feature_flags.py index 45e673f75..bad6c163b 100644 --- a/google/cloud/bigtable_v2/types/feature_flags.py +++ b/google/cloud/bigtable_v2/types/feature_flags.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -67,6 +67,9 @@ class FeatureFlags(proto.Message): Notify the server that the client supports using retry info back off durations to retry requests with. + client_side_metrics_enabled (bool): + Notify the server that the client has client + side metrics enabled. """ reverse_scans: bool = proto.Field( @@ -93,6 +96,10 @@ class FeatureFlags(proto.Message): proto.BOOL, number=7, ) + client_side_metrics_enabled: bool = proto.Field( + proto.BOOL, + number=8, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/bigtable_v2/types/request_stats.py b/google/cloud/bigtable_v2/types/request_stats.py index 61cce9491..115f76af5 100644 --- a/google/cloud/bigtable_v2/types/request_stats.py +++ b/google/cloud/bigtable_v2/types/request_stats.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/bigtable_v2/types/response_params.py b/google/cloud/bigtable_v2/types/response_params.py index 98e3a67db..3bbf3163f 100644 --- a/google/cloud/bigtable_v2/types/response_params.py +++ b/google/cloud/bigtable_v2/types/response_params.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
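The hunks above add ``authorized_view_name`` routing fields to the bigtable_v2 request messages and introduce the ``Value`` message plus the ``Mutation.AddToCell`` mutation for ``Aggregate`` families. A minimal sketch, assuming this change is installed, of how those new fields might be constructed; the resource names, the ``stats`` family, and the ``clicks`` qualifier are illustrative placeholders, not values from this diff.

# Hedged sketch only: exercises the fields added in the bigtable_v2 type
# hunks above. All concrete names below are placeholders.
from google.cloud.bigtable_v2 import types

# Read from an AuthorizedView rather than a whole table via the new
# authorized_view_name field (field 9 on ReadRowsRequest).
read_request = types.ReadRowsRequest(
    authorized_view_name=(
        "projects/my-project/instances/my-instance"
        "/tables/my-table/authorizedViews/my-view"
    ),
    app_profile_id="default",
)

# Incrementally update a cell in an Aggregate family with the new AddToCell
# mutation; qualifier, timestamp, and input travel as typed Value messages.
add_mutation = types.Mutation(
    add_to_cell=types.Mutation.AddToCell(
        family_name="stats",
        column_qualifier=types.Value(raw_value=b"clicks"),
        timestamp=types.Value(raw_timestamp_micros=0),
        input=types.Value(int_value=1),
    )
)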
diff --git a/noxfile.py b/noxfile.py index daf730a9a..f175c66da 100644 --- a/noxfile.py +++ b/noxfile.py @@ -55,7 +55,7 @@ "google-cloud-testutils", ] SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ - "pytest-asyncio", + "pytest-asyncio==0.21.2", ] SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] SYSTEM_TEST_DEPENDENCIES: List[str] = [] diff --git a/owlbot.py b/owlbot.py index 3fb079396..cde9fce64 100644 --- a/owlbot.py +++ b/owlbot.py @@ -91,7 +91,7 @@ def get_staging_dirs( microgenerator=True, cov_level=99, system_test_external_dependencies=[ - "pytest-asyncio", + "pytest-asyncio==0.21.2", ], ) @@ -218,6 +218,54 @@ def lint_setup_py(session): ''', ) + +# ---------------------------------------------------------------------------- +# Customize gapics to include PooledBigtableGrpcAsyncIOTransport +# ---------------------------------------------------------------------------- +def insert(file, before_line, insert_line, after_line, escape=None): + target = before_line + "\n" + after_line + if escape: + for c in escape: + target = target.replace(c, '\\' + c) + replacement = before_line + "\n" + insert_line + "\n" + after_line + s.replace(file, target, replacement) + + +insert( + "google/cloud/bigtable_v2/services/bigtable/client.py", + "from .transports.grpc_asyncio import BigtableGrpcAsyncIOTransport", + "from .transports.pooled_grpc_asyncio import PooledBigtableGrpcAsyncIOTransport", + "from .transports.rest import BigtableRestTransport" +) +insert( + "google/cloud/bigtable_v2/services/bigtable/client.py", + ' _transport_registry["grpc_asyncio"] = BigtableGrpcAsyncIOTransport', + ' _transport_registry["pooled_grpc_asyncio"] = PooledBigtableGrpcAsyncIOTransport', + ' _transport_registry["rest"] = BigtableRestTransport', + escape='[]"' +) +insert( + "google/cloud/bigtable_v2/services/bigtable/transports/__init__.py", + '_transport_registry["grpc_asyncio"] = BigtableGrpcAsyncIOTransport', + '_transport_registry["pooled_grpc_asyncio"] = PooledBigtableGrpcAsyncIOTransport', + '_transport_registry["rest"] = BigtableRestTransport', + escape='[]"' +) +insert( + "google/cloud/bigtable_v2/services/bigtable/transports/__init__.py", + "from .grpc_asyncio import BigtableGrpcAsyncIOTransport", + "from .pooled_grpc_asyncio import PooledBigtableGrpcAsyncIOTransport", + "from .rest import BigtableRestTransport" +) +insert( + "google/cloud/bigtable_v2/services/bigtable/transports/__init__.py", + ' "BigtableGrpcAsyncIOTransport",', + ' "PooledBigtableGrpcAsyncIOTransport",', + ' "BigtableRestTransport",', + escape='"' +) + + # ---------------------------------------------------------------------------- # Samples templates # ---------------------------------------------------------------------------- diff --git a/scripts/fixup_bigtable_admin_v2_keywords.py b/scripts/fixup_bigtable_admin_v2_keywords.py index 8c3efea10..073b1ad00 100644 --- a/scripts/fixup_bigtable_admin_v2_keywords.py +++ b/scripts/fixup_bigtable_admin_v2_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -39,15 +39,17 @@ def partition( class bigtable_adminCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'check_consistency': ('name', 'consistency_token', ), + 'check_consistency': ('name', 'consistency_token', 'standard_read_remote_writes', 'data_boost_read_local_writes', ), 'copy_backup': ('parent', 'backup_id', 'source_backup', 'expire_time', ), 'create_app_profile': ('parent', 'app_profile_id', 'app_profile', 'ignore_warnings', ), + 'create_authorized_view': ('parent', 'authorized_view_id', 'authorized_view', ), 'create_backup': ('parent', 'backup_id', 'backup', ), 'create_cluster': ('parent', 'cluster_id', 'cluster', ), 'create_instance': ('parent', 'instance_id', 'instance', 'clusters', ), 'create_table': ('parent', 'table_id', 'table', 'initial_splits', ), 'create_table_from_snapshot': ('parent', 'table_id', 'source_snapshot', ), 'delete_app_profile': ('name', 'ignore_warnings', ), + 'delete_authorized_view': ('name', 'etag', ), 'delete_backup': ('name', ), 'delete_cluster': ('name', ), 'delete_instance': ('name', ), @@ -56,6 +58,7 @@ class bigtable_adminCallTransformer(cst.CSTTransformer): 'drop_row_range': ('name', 'row_key_prefix', 'delete_all_data_from_table', ), 'generate_consistency_token': ('name', ), 'get_app_profile': ('name', ), + 'get_authorized_view': ('name', 'view', ), 'get_backup': ('name', ), 'get_cluster': ('name', ), 'get_iam_policy': ('resource', 'options', ), @@ -63,6 +66,7 @@ class bigtable_adminCallTransformer(cst.CSTTransformer): 'get_snapshot': ('name', ), 'get_table': ('name', 'view', ), 'list_app_profiles': ('parent', 'page_size', 'page_token', ), + 'list_authorized_views': ('parent', 'page_size', 'page_token', 'view', ), 'list_backups': ('parent', 'filter', 'order_by', 'page_size', 'page_token', ), 'list_clusters': ('parent', 'page_token', ), 'list_hot_tablets': ('parent', 'start_time', 'end_time', 'page_size', 'page_token', ), @@ -78,6 +82,7 @@ class bigtable_adminCallTransformer(cst.CSTTransformer): 'test_iam_permissions': ('resource', 'permissions', ), 'undelete_table': ('name', ), 'update_app_profile': ('app_profile', 'update_mask', 'ignore_warnings', ), + 'update_authorized_view': ('authorized_view', 'update_mask', 'ignore_warnings', ), 'update_backup': ('backup', 'update_mask', ), 'update_cluster': ('name', 'location', 'state', 'serve_nodes', 'cluster_config', 'default_storage_type', 'encryption_config', ), 'update_instance': ('display_name', 'name', 'state', 'type_', 'labels', 'create_time', 'satisfies_pzs', ), diff --git a/scripts/fixup_bigtable_v2_keywords.py b/scripts/fixup_bigtable_v2_keywords.py index 8d32e5b70..3d1381c49 100644 --- a/scripts/fixup_bigtable_v2_keywords.py +++ b/scripts/fixup_bigtable_v2_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -39,15 +39,15 @@ def partition( class bigtableCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'check_and_mutate_row': ('table_name', 'row_key', 'app_profile_id', 'predicate_filter', 'true_mutations', 'false_mutations', ), + 'check_and_mutate_row': ('row_key', 'table_name', 'authorized_view_name', 'app_profile_id', 'predicate_filter', 'true_mutations', 'false_mutations', ), 'generate_initial_change_stream_partitions': ('table_name', 'app_profile_id', ), - 'mutate_row': ('table_name', 'row_key', 'mutations', 'app_profile_id', ), - 'mutate_rows': ('table_name', 'entries', 'app_profile_id', ), + 'mutate_row': ('row_key', 'mutations', 'table_name', 'authorized_view_name', 'app_profile_id', ), + 'mutate_rows': ('entries', 'table_name', 'authorized_view_name', 'app_profile_id', ), 'ping_and_warm': ('name', 'app_profile_id', ), 'read_change_stream': ('table_name', 'app_profile_id', 'partition', 'start_time', 'continuation_tokens', 'end_time', 'heartbeat_duration', ), - 'read_modify_write_row': ('table_name', 'row_key', 'rules', 'app_profile_id', ), - 'read_rows': ('table_name', 'app_profile_id', 'rows', 'filter', 'rows_limit', 'request_stats_view', 'reversed', ), - 'sample_row_keys': ('table_name', 'app_profile_id', ), + 'read_modify_write_row': ('row_key', 'rules', 'table_name', 'authorized_view_name', 'app_profile_id', ), + 'read_rows': ('table_name', 'authorized_view_name', 'app_profile_id', 'rows', 'filter', 'rows_limit', 'request_stats_view', 'reversed', ), + 'sample_row_keys': ('table_name', 'authorized_view_name', 'app_profile_id', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index ee858c3ec..d96846bb5 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -11,4 +11,3 @@ grpc-google-iam-v1==0.12.4 proto-plus==1.22.0 libcst==0.2.5 protobuf==3.19.5 -pytest-asyncio==0.21.1 diff --git a/tests/__init__.py b/tests/__init__.py index 89a37dc92..8f6cf0682 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/system/data/test_system.py b/tests/system/data/test_system.py index aeb08fc1a..9fe208551 100644 --- a/tests/system/data/test_system.py +++ b/tests/system/data/test_system.py @@ -140,7 +140,6 @@ async def _create_row_and_mutation( return row_key, mutation -@pytest.mark.usefixtures("table") @pytest_asyncio.fixture(scope="function") async def temp_rows(table): builder = TempRowBuilder(table) diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index 89a37dc92..8f6cf0682 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
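The fixup-script updates above reorder the data-plane keyword lists so that ``table_name`` becomes optional and the new ``authorized_view_name`` keyword is accepted on read and mutate calls. A hedged usage sketch follows, assuming ambient credentials and this change installed; the resource names are placeholders.

# Hedged sketch: calling the data client against an AuthorizedView, matching
# the updated METHOD_TO_PARAMS ordering above. Names are placeholders and
# BigtableClient() assumes application default credentials.
from google.cloud.bigtable_v2 import BigtableClient, types

client = BigtableClient()

request = types.SampleRowKeysRequest(
    authorized_view_name=(
        "projects/my-project/instances/my-instance"
        "/tables/my-table/authorizedViews/my-view"
    ),
    app_profile_id="default",
)

# SampleRowKeys is server-streaming: each response carries a sampled row key
# and its approximate byte offset within the table.
for response in client.sample_row_keys(request=request):
    print(response.row_key, response.offset_bytes)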
diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index 89a37dc92..8f6cf0682 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/bigtable_admin_v2/__init__.py b/tests/unit/gapic/bigtable_admin_v2/__init__.py index 89a37dc92..8f6cf0682 100644 --- a/tests/unit/gapic/bigtable_admin_v2/__init__.py +++ b/tests/unit/gapic/bigtable_admin_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py b/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py index 10e9d101b..9a418047f 100644 --- a/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py +++ b/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -1241,7 +1241,8 @@ def test_create_instance(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.CreateInstanceRequest() + request = bigtable_instance_admin.CreateInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1257,12 +1258,155 @@ def test_create_instance_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.CreateInstanceRequest() +def test_create_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + ) + + +def test_create_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.CreateInstanceRequest() + + +@pytest.mark.asyncio +async def test_create_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_instance + ] = mock_object + + request = {} + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_instance_async( transport: str = "grpc_asyncio", @@ -1288,7 +1432,8 @@ async def test_create_instance_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.CreateInstanceRequest() + request = bigtable_instance_admin.CreateInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1504,7 +1649,8 @@ def test_get_instance(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.GetInstanceRequest() + request = bigtable_instance_admin.GetInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, instance.Instance) @@ -1525,12 +1671,151 @@ def test_get_instance_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.GetInstanceRequest() +def test_get_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.GetInstanceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.GetInstanceRequest( + name="name_value", + ) + + +def test_get_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instance.Instance( + name="name_value", + display_name="display_name_value", + state=instance.Instance.State.READY, + type_=instance.Instance.Type.PRODUCTION, + satisfies_pzs=True, + ) + ) + response = await client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.GetInstanceRequest() + + +@pytest.mark.asyncio +async def test_get_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_instance + ] = mock_object + + request = {} + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_instance_async( transport: str = "grpc_asyncio", @@ -1562,7 +1847,8 @@ async def test_get_instance_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.GetInstanceRequest() + request = bigtable_instance_admin.GetInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, instance.Instance) @@ -1746,7 +2032,8 @@ def test_list_instances(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.ListInstancesRequest() + request = bigtable_instance_admin.ListInstancesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response.raw_page is response @@ -1765,12 +2052,150 @@ def test_list_instances_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_instances() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.ListInstancesRequest() +def test_list_instances_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.ListInstancesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_instances(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.ListInstancesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_instances_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_instances_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_instance_admin.ListInstancesResponse( + failed_locations=["failed_locations_value"], + next_page_token="next_page_token_value", + ) + ) + response = await client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.ListInstancesRequest() + + +@pytest.mark.asyncio +async def test_list_instances_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_instances + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_instances + ] = mock_object + + request = {} + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_instances_async( transport: str = "grpc_asyncio", @@ -1799,7 +2224,8 @@ async def test_list_instances_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.ListInstancesRequest() + request = bigtable_instance_admin.ListInstancesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, bigtable_instance_admin.ListInstancesResponse) @@ -1987,7 +2413,8 @@ def test_update_instance(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == instance.Instance() + request = instance.Instance() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, instance.Instance) @@ -2008,17 +2435,158 @@ def test_update_instance_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == instance.Instance() -@pytest.mark.asyncio -async def test_update_instance_async( - transport: str = "grpc_asyncio", request_type=instance.Instance -): - client = BigtableInstanceAdminAsyncClient( +def test_update_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = instance.Instance( + name="name_value", + display_name="display_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == instance.Instance( + name="name_value", + display_name="display_name_value", + ) + + +def test_update_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instance.Instance( + name="name_value", + display_name="display_name_value", + state=instance.Instance.State.READY, + type_=instance.Instance.Type.PRODUCTION, + satisfies_pzs=True, + ) + ) + response = await client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == instance.Instance() + + +@pytest.mark.asyncio +async def test_update_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_instance + ] = mock_object + + request = {} + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=instance.Instance +): + client = BigtableInstanceAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2044,7 +2612,8 @@ async def test_update_instance_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == instance.Instance() + request = instance.Instance() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, instance.Instance) @@ -2147,7 +2716,8 @@ def test_partial_update_instance(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.PartialUpdateInstanceRequest() + request = bigtable_instance_admin.PartialUpdateInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2165,12 +2735,158 @@ def test_partial_update_instance_empty_call(): with mock.patch.object( type(client.transport.partial_update_instance), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.partial_update_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.PartialUpdateInstanceRequest() +def test_partial_update_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.PartialUpdateInstanceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partial_update_instance), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.partial_update_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.PartialUpdateInstanceRequest() + + +def test_partial_update_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.partial_update_instance + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.partial_update_instance + ] = mock_rpc + request = {} + client.partial_update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.partial_update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_partial_update_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partial_update_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.partial_update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.PartialUpdateInstanceRequest() + + +@pytest.mark.asyncio +async def test_partial_update_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.partial_update_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.partial_update_instance + ] = mock_object + + request = {} + await client.partial_update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.partial_update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_partial_update_instance_async( transport: str = "grpc_asyncio", @@ -2198,7 +2914,8 @@ async def test_partial_update_instance_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.PartialUpdateInstanceRequest() + request = bigtable_instance_admin.PartialUpdateInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2396,7 +3113,8 @@ def test_delete_instance(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.DeleteInstanceRequest() + request = bigtable_instance_admin.DeleteInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2412,12 +3130,143 @@ def test_delete_instance_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.DeleteInstanceRequest() +def test_delete_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.DeleteInstanceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.DeleteInstanceRequest( + name="name_value", + ) + + +def test_delete_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.DeleteInstanceRequest() + + +@pytest.mark.asyncio +async def test_delete_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_instance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_instance + ] = mock_object + + request = {} + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_instance_async( transport: str = "grpc_asyncio", @@ -2441,7 +3290,8 @@ async def test_delete_instance_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.DeleteInstanceRequest() + request = bigtable_instance_admin.DeleteInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2617,7 +3467,8 @@ def test_create_cluster(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.CreateClusterRequest() + request = bigtable_instance_admin.CreateClusterRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2633,18 +3484,161 @@ def test_create_cluster_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_cluster() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.CreateClusterRequest() -@pytest.mark.asyncio -async def test_create_cluster_async( - transport: str = "grpc_asyncio", - request_type=bigtable_instance_admin.CreateClusterRequest, -): - client = BigtableInstanceAdminAsyncClient( +def test_create_cluster_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_cluster(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.CreateClusterRequest( + parent="parent_value", + cluster_id="cluster_id_value", + ) + + +def test_create_cluster_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc + request = {} + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_cluster_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.CreateClusterRequest() + + +@pytest.mark.asyncio +async def test_create_cluster_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_cluster + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_cluster + ] = mock_object + + request = {} + await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_cluster_async( + transport: str = "grpc_asyncio", + request_type=bigtable_instance_admin.CreateClusterRequest, +): + client = BigtableInstanceAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2664,7 +3658,8 @@ async def test_create_cluster_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.CreateClusterRequest() + request = bigtable_instance_admin.CreateClusterRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2870,7 +3865,8 @@ def test_get_cluster(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.GetClusterRequest() + request = bigtable_instance_admin.GetClusterRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, instance.Cluster) @@ -2891,12 +3887,151 @@ def test_get_cluster_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_cluster() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.GetClusterRequest() +def test_get_cluster_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.GetClusterRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_cluster(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.GetClusterRequest( + name="name_value", + ) + + +def test_get_cluster_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc + request = {} + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cluster_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instance.Cluster( + name="name_value", + location="location_value", + state=instance.Cluster.State.READY, + serve_nodes=1181, + default_storage_type=common.StorageType.SSD, + ) + ) + response = await client.get_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.GetClusterRequest() + + +@pytest.mark.asyncio +async def test_get_cluster_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_cluster + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_cluster + ] = mock_object + + request = {} + await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_cluster_async( transport: str = "grpc_asyncio", @@ -2928,7 +4063,8 @@ async def test_get_cluster_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.GetClusterRequest() + request = bigtable_instance_admin.GetClusterRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, instance.Cluster) @@ -3112,7 +4248,8 @@ def test_list_clusters(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.ListClustersRequest() + request = bigtable_instance_admin.ListClustersRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response.raw_page is response @@ -3131,12 +4268,150 @@ def test_list_clusters_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_clusters() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.ListClustersRequest() +def test_list_clusters_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.ListClustersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_clusters(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.ListClustersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_clusters_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_clusters in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc + request = {} + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_clusters_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_instance_admin.ListClustersResponse( + failed_locations=["failed_locations_value"], + next_page_token="next_page_token_value", + ) + ) + response = await client.list_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.ListClustersRequest() + + +@pytest.mark.asyncio +async def test_list_clusters_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_clusters + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_clusters + ] = mock_object + + request = {} + await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_clusters_async( transport: str = "grpc_asyncio", @@ -3165,7 +4440,8 @@ async def test_list_clusters_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.ListClustersRequest() + request = bigtable_instance_admin.ListClustersRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, bigtable_instance_admin.ListClustersResponse) @@ -3347,7 +4623,8 @@ def test_update_cluster(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == instance.Cluster() + request = instance.Cluster() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3363,12 +4640,155 @@ def test_update_cluster_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_cluster() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == instance.Cluster() +def test_update_cluster_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = instance.Cluster( + name="name_value", + location="location_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_cluster(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == instance.Cluster( + name="name_value", + location="location_value", + ) + + +def test_update_cluster_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc + request = {} + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_cluster_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == instance.Cluster() + + +@pytest.mark.asyncio +async def test_update_cluster_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_cluster + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_cluster + ] = mock_object + + request = {} + await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_cluster_async( transport: str = "grpc_asyncio", request_type=instance.Cluster @@ -3393,7 +4813,8 @@ async def test_update_cluster_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == instance.Cluster() + request = instance.Cluster() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3493,30 +4914,177 @@ def test_partial_update_cluster(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + request = bigtable_instance_admin.PartialUpdateClusterRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_partial_update_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partial_update_cluster), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.partial_update_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.PartialUpdateClusterRequest() - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) +def test_partial_update_cluster_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.PartialUpdateClusterRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partial_update_cluster), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.partial_update_cluster(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.PartialUpdateClusterRequest() + + +def test_partial_update_cluster_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.partial_update_cluster + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.partial_update_cluster + ] = mock_rpc + request = {} + client.partial_update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.partial_update_cluster(request) -def test_partial_update_cluster_empty_call(): + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_partial_update_cluster_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = BigtableInstanceAdminClient( + client = BigtableInstanceAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.partial_update_cluster), "__call__" ) as call: - client.partial_update_cluster() + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.partial_update_cluster() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.PartialUpdateClusterRequest() +@pytest.mark.asyncio +async def test_partial_update_cluster_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.partial_update_cluster + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.partial_update_cluster + ] = mock_object + + request = {} + await client.partial_update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.partial_update_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_partial_update_cluster_async( transport: str = "grpc_asyncio", @@ -3544,7 +5112,8 @@ async def test_partial_update_cluster_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.PartialUpdateClusterRequest() + request = bigtable_instance_admin.PartialUpdateClusterRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3742,7 +5311,8 @@ def test_delete_cluster(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.DeleteClusterRequest() + request = bigtable_instance_admin.DeleteClusterRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -3758,12 +5328,143 @@ def test_delete_cluster_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_cluster() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.DeleteClusterRequest() +def test_delete_cluster_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.DeleteClusterRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_cluster(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.DeleteClusterRequest( + name="name_value", + ) + + +def test_delete_cluster_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc + request = {} + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_cluster_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.DeleteClusterRequest() + + +@pytest.mark.asyncio +async def test_delete_cluster_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_cluster + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_cluster + ] = mock_object + + request = {} + await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_cluster_async( transport: str = "grpc_asyncio", @@ -3787,7 +5488,8 @@ async def test_delete_cluster_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.DeleteClusterRequest() + request = bigtable_instance_admin.DeleteClusterRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -3970,7 +5672,8 @@ def test_create_app_profile(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.CreateAppProfileRequest() + request = bigtable_instance_admin.CreateAppProfileRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, instance.AppProfile) @@ -3991,12 +5694,159 @@ def test_create_app_profile_empty_call(): with mock.patch.object( type(client.transport.create_app_profile), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_app_profile() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.CreateAppProfileRequest() +def test_create_app_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = bigtable_instance_admin.CreateAppProfileRequest( + parent="parent_value", + app_profile_id="app_profile_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_app_profile), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_app_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.CreateAppProfileRequest( + parent="parent_value", + app_profile_id="app_profile_id_value", + ) + + +def test_create_app_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_app_profile in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_app_profile + ] = mock_rpc + request = {} + client.create_app_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_app_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_app_profile_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_app_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instance.AppProfile( + name="name_value", + etag="etag_value", + description="description_value", + ) + ) + response = await client.create_app_profile() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.CreateAppProfileRequest() + + +@pytest.mark.asyncio +async def test_create_app_profile_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_app_profile + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_app_profile + ] = mock_object + + request = {} + await client.create_app_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_app_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_app_profile_async( transport: str = "grpc_asyncio", @@ -4028,7 +5878,8 @@ async def test_create_app_profile_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.CreateAppProfileRequest() + request = bigtable_instance_admin.CreateAppProfileRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, instance.AppProfile) @@ -4240,7 +6091,8 @@ def test_get_app_profile(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.GetAppProfileRequest() + request = bigtable_instance_admin.GetAppProfileRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, instance.AppProfile) @@ -4254,17 +6106,154 @@ def test_get_app_profile_empty_call(): # i.e. request == None and no flattened fields passed, work. client = BigtableInstanceAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_app_profile), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_app_profile() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.GetAppProfileRequest() + + +def test_get_app_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.GetAppProfileRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_app_profile), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_app_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.GetAppProfileRequest( + name="name_value", + ) + + +def test_get_app_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_app_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_app_profile] = mock_rpc + request = {} + client.get_app_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_app_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_app_profile_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_app_profile), "__call__") as call: - client.get_app_profile() + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instance.AppProfile( + name="name_value", + etag="etag_value", + description="description_value", + ) + ) + response = await client.get_app_profile() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.GetAppProfileRequest() +@pytest.mark.asyncio +async def test_get_app_profile_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_app_profile + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_app_profile + ] = mock_object + + request = {} + await client.get_app_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_app_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_app_profile_async( transport: str = "grpc_asyncio", @@ -4294,7 +6283,8 @@ async def test_get_app_profile_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.GetAppProfileRequest() + request = bigtable_instance_admin.GetAppProfileRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, instance.AppProfile) @@ -4478,7 +6468,8 @@ def test_list_app_profiles(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.ListAppProfilesRequest() + request = bigtable_instance_admin.ListAppProfilesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAppProfilesPager) @@ -4498,12 +6489,156 @@ def test_list_app_profiles_empty_call(): with mock.patch.object( type(client.transport.list_app_profiles), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_app_profiles() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.ListAppProfilesRequest() +def test_list_app_profiles_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.ListAppProfilesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_app_profiles), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_app_profiles(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.ListAppProfilesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_app_profiles_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_app_profiles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_app_profiles + ] = mock_rpc + request = {} + client.list_app_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_app_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_app_profiles_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_app_profiles), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_instance_admin.ListAppProfilesResponse( + next_page_token="next_page_token_value", + failed_locations=["failed_locations_value"], + ) + ) + response = await client.list_app_profiles() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.ListAppProfilesRequest() + + +@pytest.mark.asyncio +async def test_list_app_profiles_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_app_profiles + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_app_profiles + ] = mock_object + + request = {} + await client.list_app_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_app_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_app_profiles_async( transport: str = "grpc_asyncio", @@ -4534,7 +6669,8 @@ async def test_list_app_profiles_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.ListAppProfilesRequest() + request = bigtable_instance_admin.ListAppProfilesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAppProfilesAsyncPager) @@ -4924,7 +7060,8 @@ def test_update_app_profile(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.UpdateAppProfileRequest() + request = bigtable_instance_admin.UpdateAppProfileRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -4942,12 +7079,157 @@ def test_update_app_profile_empty_call(): with mock.patch.object( type(client.transport.update_app_profile), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_app_profile() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.UpdateAppProfileRequest() +def test_update_app_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.UpdateAppProfileRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_app_profile), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_app_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.UpdateAppProfileRequest() + + +def test_update_app_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_app_profile in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_app_profile + ] = mock_rpc + request = {} + client.update_app_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_app_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_app_profile_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_app_profile), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_app_profile() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.UpdateAppProfileRequest() + + +@pytest.mark.asyncio +async def test_update_app_profile_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_app_profile + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_app_profile + ] = mock_object + + request = {} + await client.update_app_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_app_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_app_profile_async( transport: str = "grpc_asyncio", @@ -4975,7 +7257,8 @@ async def test_update_app_profile_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.UpdateAppProfileRequest() + request = bigtable_instance_admin.UpdateAppProfileRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -5173,32 +7456,172 @@ def test_delete_app_profile(request_type, transport: str = "grpc"): response = client.delete_app_profile(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.DeleteAppProfileRequest() + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = bigtable_instance_admin.DeleteAppProfileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_app_profile_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_app_profile), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_app_profile() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.DeleteAppProfileRequest() + + +def test_delete_app_profile_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.DeleteAppProfileRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_app_profile), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_app_profile(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.DeleteAppProfileRequest( + name="name_value", + ) + + +def test_delete_app_profile_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_app_profile in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_app_profile + ] = mock_rpc + request = {} + client.delete_app_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Establish that the response is the type that we expect. - assert response is None + client.delete_app_profile(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_delete_app_profile_empty_call(): + +@pytest.mark.asyncio +async def test_delete_app_profile_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = BigtableInstanceAdminClient( + client = BigtableInstanceAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_app_profile), "__call__" ) as call: - client.delete_app_profile() + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_app_profile() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.DeleteAppProfileRequest() +@pytest.mark.asyncio +async def test_delete_app_profile_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_app_profile + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_app_profile + ] = mock_object + + request = {} + await client.delete_app_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_app_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_app_profile_async( transport: str = "grpc_asyncio", @@ -5224,7 +7647,8 @@ async def test_delete_app_profile_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.DeleteAppProfileRequest() + request = bigtable_instance_admin.DeleteAppProfileRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -5411,7 +7835,8 @@ def test_get_iam_policy(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, policy_pb2.Policy) @@ -5429,12 +7854,148 @@ def test_get_iam_policy_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.GetIamPolicyRequest() +def test_get_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + +def test_get_iam_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_iam_policy_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_iam_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_iam_policy + ] = mock_object + + request = {} + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_iam_policy_async( transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest @@ -5462,7 +8023,8 @@ async def test_get_iam_policy_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, policy_pb2.Policy) @@ -5660,7 +8222,8 @@ def test_set_iam_policy(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, policy_pb2.Policy) @@ -5678,12 +8241,148 @@ def test_set_iam_policy_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.set_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.SetIamPolicyRequest() +def test_set_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.set_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + +def test_set_iam_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_iam_policy_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.set_iam_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.set_iam_policy + ] = mock_object + + request = {} + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_set_iam_policy_async( transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest @@ -5711,7 +8410,8 @@ async def test_set_iam_policy_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, policy_pb2.Policy) @@ -5891,51 +8591,195 @@ async def test_set_iam_policy_flattened_error_async(): def test_test_iam_permissions(request_type, transport: str = "grpc"): client = BigtableInstanceAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - response = client.test_iam_permissions(request) + client.test_iam_permissions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + ) + + +def test_test_iam_permissions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + request = {} + client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + assert mock_rpc.call_count == 1 - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ["permissions_value"] + client.test_iam_permissions(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_test_iam_permissions_empty_call(): + +@pytest.mark.asyncio +async def test_test_iam_permissions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = BigtableInstanceAdminClient( + client = BigtableInstanceAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: - client.test_iam_permissions() + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() +@pytest.mark.asyncio +async def test_test_iam_permissions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.test_iam_permissions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.test_iam_permissions + ] = mock_object + + request = {} + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_test_iam_permissions_async( transport: str = "grpc_asyncio", @@ -5965,7 +8809,8 @@ async def test_test_iam_permissions_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) @@ -6185,7 +9030,8 @@ def test_list_hot_tablets(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.ListHotTabletsRequest() + request = bigtable_instance_admin.ListHotTabletsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListHotTabletsPager) @@ -6202,12 +9048,151 @@ def test_list_hot_tablets_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_hot_tablets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_hot_tablets() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_instance_admin.ListHotTabletsRequest() +def test_list_hot_tablets_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_instance_admin.ListHotTabletsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_hot_tablets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_hot_tablets(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.ListHotTabletsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_hot_tablets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_hot_tablets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_hot_tablets + ] = mock_rpc + request = {} + client.list_hot_tablets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_hot_tablets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_hot_tablets_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_hot_tablets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_instance_admin.ListHotTabletsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_hot_tablets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_instance_admin.ListHotTabletsRequest() + + +@pytest.mark.asyncio +async def test_list_hot_tablets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_hot_tablets + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_hot_tablets + ] = mock_object + + request = {} + await client.list_hot_tablets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_hot_tablets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_hot_tablets_async( transport: str = "grpc_asyncio", @@ -6235,7 +9220,8 @@ async def test_list_hot_tablets_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_instance_admin.ListHotTabletsRequest() + request = bigtable_instance_admin.ListHotTabletsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListHotTabletsAsyncPager) @@ -6615,6 +9601,46 @@ def test_create_instance_rest(request_type): assert response.operation.name == "operations/spam" +def test_create_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_instance_rest_required_fields( request_type=bigtable_instance_admin.CreateInstanceRequest, ): @@ -6909,6 +9935,42 @@ def test_get_instance_rest(request_type): assert response.satisfies_pzs is True +def test_get_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_instance_rest_required_fields( request_type=bigtable_instance_admin.GetInstanceRequest, ): @@ -7179,6 +10241,42 @@ def test_list_instances_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_instances_rest_required_fields( request_type=bigtable_instance_admin.ListInstancesRequest, ): @@ -7461,6 +10559,42 @@ def test_update_instance_rest(request_type): assert response.satisfies_pzs is True +def test_update_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_instance_rest_required_fields(request_type=instance.Instance): transport_class = transports.BigtableInstanceAdminRestTransport @@ -7742,6 +10876,51 @@ def get_message_fields(field): assert response.operation.name == "operations/spam" +def test_partial_update_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.partial_update_instance + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.partial_update_instance + ] = mock_rpc + + request = {} + client.partial_update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.partial_update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_partial_update_instance_rest_required_fields( request_type=bigtable_instance_admin.PartialUpdateInstanceRequest, ): @@ -8008,8 +11187,44 @@ def test_delete_instance_rest(request_type): req.return_value = response_value response = client.delete_instance(request) - # Establish that the response is the type that we expect. - assert response is None + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 def test_delete_instance_rest_required_fields( @@ -8349,6 +11564,46 @@ def get_message_fields(field): assert response.operation.name == "operations/spam" +def test_create_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_cluster] = mock_rpc + + request = {} + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_cluster_rest_required_fields( request_type=bigtable_instance_admin.CreateClusterRequest, ): @@ -8652,6 +11907,42 @@ def test_get_cluster_rest(request_type): assert response.default_storage_type == common.StorageType.SSD +def test_get_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_cluster] = mock_rpc + + request = {} + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_cluster_rest_required_fields( request_type=bigtable_instance_admin.GetClusterRequest, ): @@ -8923,6 +12214,42 @@ def test_list_clusters_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_clusters_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_clusters in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_clusters] = mock_rpc + + request = {} + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_clusters(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_clusters_rest_required_fields( request_type=bigtable_instance_admin.ListClustersRequest, ): @@ -9191,6 +12518,46 @@ def test_update_cluster_rest(request_type): assert response.operation.name == "operations/spam" +def test_update_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_cluster] = mock_rpc + + request = {} + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_cluster_rest_interceptors(null_interceptor): transport = transports.BigtableInstanceAdminRestTransport( @@ -9404,6 +12771,51 @@ def get_message_fields(field): assert response.operation.name == "operations/spam" +def test_partial_update_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.partial_update_cluster + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.partial_update_cluster + ] = mock_rpc + + request = {} + client.partial_update_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.partial_update_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_partial_update_cluster_rest_required_fields( request_type=bigtable_instance_admin.PartialUpdateClusterRequest, ): @@ -9679,6 +13091,42 @@ def test_delete_cluster_rest(request_type): assert response is None +def test_delete_cluster_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_cluster in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_cluster] = mock_rpc + + request = {} + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_cluster(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_cluster_rest_required_fields( request_type=bigtable_instance_admin.DeleteClusterRequest, ): @@ -9923,6 +13371,7 @@ def test_create_app_profile_rest(request_type): }, "priority": 1, "standard_isolation": {"priority": 1}, + "data_boost_isolation_read_only": {"compute_billing_owner": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -10023,6 +13472,46 @@ def get_message_fields(field): assert response.description == "description_value" +def test_create_app_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_app_profile in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_app_profile + ] = mock_rpc + + request = {} + client.create_app_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_app_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_app_profile_rest_required_fields( request_type=bigtable_instance_admin.CreateAppProfileRequest, ): @@ -10336,6 +13825,42 @@ def test_get_app_profile_rest(request_type): assert response.description == "description_value" +def test_get_app_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_app_profile in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_app_profile] = mock_rpc + + request = {} + client.get_app_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_app_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_app_profile_rest_required_fields( request_type=bigtable_instance_admin.GetAppProfileRequest, ): @@ -10608,6 +14133,44 @@ def test_list_app_profiles_rest(request_type): assert response.failed_locations == ["failed_locations_value"] +def test_list_app_profiles_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_app_profiles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_app_profiles + ] = mock_rpc + + request = {} + client.list_app_profiles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_app_profiles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_app_profiles_rest_required_fields( request_type=bigtable_instance_admin.ListAppProfilesRequest, ): @@ -10946,6 +14509,7 @@ def test_update_app_profile_rest(request_type): }, "priority": 1, "standard_isolation": {"priority": 1}, + "data_boost_isolation_read_only": {"compute_billing_owner": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -11036,6 +14600,50 @@ def get_message_fields(field): assert response.operation.name == "operations/spam" +def test_update_app_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_app_profile in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_app_profile + ] = mock_rpc + + request = {} + client.update_app_profile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_app_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_app_profile_rest_required_fields( request_type=bigtable_instance_admin.UpdateAppProfileRequest, ): @@ -11325,6 +14933,46 @@ def test_delete_app_profile_rest(request_type): assert response is None +def test_delete_app_profile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_app_profile in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_app_profile + ] = mock_rpc + + request = {} + client.delete_app_profile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_app_profile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_app_profile_rest_required_fields( request_type=bigtable_instance_admin.DeleteAppProfileRequest, ): @@ -11608,6 +15256,42 @@ def test_get_iam_policy_rest(request_type): assert response.etag == b"etag_blob" +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=iam_policy_pb2.GetIamPolicyRequest, ): @@ -11871,6 +15555,42 @@ def test_set_iam_policy_rest(request_type): assert response.etag == b"etag_blob" +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=iam_policy_pb2.SetIamPolicyRequest, ): @@ -12140,6 +15860,46 @@ def test_test_iam_permissions_rest(request_type): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=iam_policy_pb2.TestIamPermissionsRequest, ): @@ -12419,6 +16179,44 @@ def test_list_hot_tablets_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_hot_tablets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableInstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_hot_tablets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_hot_tablets + ] = mock_rpc + + request = {} + client.list_hot_tablets(request) + + # Establish that the underlying gRPC stub method was called. 
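# A minimal, self-contained sketch (not part of the generated diff) of the caching
# behaviour the *_use_cached_wrapped_rpc tests above exercise: the transport calls
# google.api_core.gapic_v1.method.wrap_method once per RPC at construction time, and
# later invocations reuse the cached wrapper instead of re-wrapping on every call.
# `DemoTransport` and `fake_rpc` are hypothetical stand-ins, not library APIs.
from google.api_core.gapic_v1 import method as gapic_method


def fake_rpc(request, **kwargs):
    # Stand-in for a real gRPC/REST stub method; simply echoes the request.
    return request


class DemoTransport:
    def __init__(self):
        # Wrap each RPC exactly once and cache it, mirroring the role played by
        # _prep_wrapped_messages / _wrapped_methods in the generated transports.
        self._wrapped_methods = {fake_rpc: gapic_method.wrap_method(fake_rpc)}

    def call(self, request):
        # Subsequent calls look up the cached wrapper; wrap_method is not invoked
        # again, which is what the tests assert by patching it with a mock.
        return self._wrapped_methods[fake_rpc](request)


transport = DemoTransport()
assert transport.call({"name": "first"}) == {"name": "first"}
assert transport.call({"name": "second"}) == {"name": "second"}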
+ assert mock_rpc.call_count == 1 + + client.list_hot_tablets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_hot_tablets_rest_required_fields( request_type=bigtable_instance_admin.ListHotTabletsRequest, ): diff --git a/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py b/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py index 67f02f9ce..455ec88d8 100644 --- a/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py +++ b/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -60,6 +60,7 @@ from google.cloud.bigtable_admin_v2.types import bigtable_table_admin from google.cloud.bigtable_admin_v2.types import table from google.cloud.bigtable_admin_v2.types import table as gba_table +from google.cloud.bigtable_admin_v2.types import types from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -1218,7 +1219,8 @@ def test_create_table(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CreateTableRequest() + request = bigtable_table_admin.CreateTableRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, gba_table.Table) @@ -1237,12 +1239,151 @@ def test_create_table_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_table() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_table_admin.CreateTableRequest() +def test_create_table_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.CreateTableRequest( + parent="parent_value", + table_id="table_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_table(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CreateTableRequest( + parent="parent_value", + table_id="table_id_value", + ) + + +def test_create_table_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_table] = mock_rpc + request = {} + client.create_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_table_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gba_table.Table( + name="name_value", + granularity=gba_table.Table.TimestampGranularity.MILLIS, + deletion_protection=True, + ) + ) + response = await client.create_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CreateTableRequest() + + +@pytest.mark.asyncio +async def test_create_table_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_table + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_table + ] = mock_object + + request = {} + await client.create_table(request) + + # Establish that the underlying gRPC stub method was called. 
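# A runnable distillation (not part of the generated diff) of the AwaitableMock
# helper defined inside the async *_use_cached_wrapped_rpc tests: a Mock subclass
# whose instances can be awaited directly, yielding nothing and counting each
# await, so the test can verify the cached wrapper was awaited without a real RPC.
import asyncio
from unittest import mock


class AwaitableMock(mock.AsyncMock):
    def __await__(self):
        # Record the await and return an empty iterator so `await` completes
        # immediately with a result of None.
        self.await_count += 1
        return iter([])


async def main():
    rpc = AwaitableMock()
    await rpc
    await rpc
    assert rpc.await_count == 2


asyncio.run(main())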
+ assert mock_object.call_count == 1 + + await client.create_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_table_async( transport: str = "grpc_asyncio", @@ -1272,7 +1413,8 @@ async def test_create_table_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CreateTableRequest() + request = bigtable_table_admin.CreateTableRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, gba_table.Table) @@ -1473,7 +1615,8 @@ def test_create_table_from_snapshot(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CreateTableFromSnapshotRequest() + request = bigtable_table_admin.CreateTableFromSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1491,12 +1634,166 @@ def test_create_table_from_snapshot_empty_call(): with mock.patch.object( type(client.transport.create_table_from_snapshot), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_table_from_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_table_admin.CreateTableFromSnapshotRequest() +def test_create_table_from_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.CreateTableFromSnapshotRequest( + parent="parent_value", + table_id="table_id_value", + source_snapshot="source_snapshot_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_table_from_snapshot), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_table_from_snapshot(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CreateTableFromSnapshotRequest( + parent="parent_value", + table_id="table_id_value", + source_snapshot="source_snapshot_value", + ) + + +def test_create_table_from_snapshot_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_table_from_snapshot + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_table_from_snapshot + ] = mock_rpc + request = {} + client.create_table_from_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_table_from_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_table_from_snapshot_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_table_from_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_table_from_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CreateTableFromSnapshotRequest() + + +@pytest.mark.asyncio +async def test_create_table_from_snapshot_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_table_from_snapshot + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_table_from_snapshot + ] = mock_object + + request = {} + await client.create_table_from_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_table_from_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_table_from_snapshot_async( transport: str = "grpc_asyncio", @@ -1524,7 +1821,8 @@ async def test_create_table_from_snapshot_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CreateTableFromSnapshotRequest() + request = bigtable_table_admin.CreateTableFromSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1734,7 +2032,8 @@ def test_list_tables(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.ListTablesRequest() + request = bigtable_table_admin.ListTablesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTablesPager) @@ -1751,12 +2050,149 @@ def test_list_tables_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_tables() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_table_admin.ListTablesRequest() +def test_list_tables_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.ListTablesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_tables(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.ListTablesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_tables_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_tables in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_tables] = mock_rpc + request = {} + client.list_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_tables(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_tables_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tables), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_table_admin.ListTablesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_tables() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.ListTablesRequest() + + +@pytest.mark.asyncio +async def test_list_tables_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_tables + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_tables + ] = mock_object + + request = {} + await client.list_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_tables(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_tables_async( transport: str = "grpc_asyncio", request_type=bigtable_table_admin.ListTablesRequest @@ -1783,7 +2219,8 @@ async def test_list_tables_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.ListTablesRequest() + request = bigtable_table_admin.ListTablesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTablesAsyncPager) @@ -2158,7 +2595,8 @@ def test_get_table(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.GetTableRequest() + request = bigtable_table_admin.GetTableRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, table.Table) @@ -2177,13 +2615,148 @@ def test_get_table_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_table() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_table_admin.GetTableRequest() -@pytest.mark.asyncio +def test_get_table_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
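# An illustrative sketch (not part of the generated diff) of the AIP-4235 behaviour
# that the *_non_empty_request_with_auto_populated_field tests refer to: request
# string fields annotated for auto-population are filled with a UUID4 on the client
# when the caller leaves them empty, while explicitly set fields pass through
# untouched. `DemoRequest`, its `request_id` field, and `send` are hypothetical
# illustrations, not fields of the Bigtable admin requests changed in this diff.
import uuid
from dataclasses import dataclass


@dataclass
class DemoRequest:
    parent: str = ""
    request_id: str = ""  # hypothetical auto-populated UUID4 field


def send(request: DemoRequest) -> DemoRequest:
    # Only populate the field when the caller did not provide a value.
    if not request.request_id:
        request.request_id = str(uuid.uuid4())
    return request


req = send(DemoRequest(parent="parent_value"))
assert req.parent == "parent_value"
assert len(req.request_id) == 36  # a UUID4 was filled in client-side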
+ client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.GetTableRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_table(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.GetTableRequest( + name="name_value", + ) + + +def test_get_table_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_table] = mock_rpc + request = {} + client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_table_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_table), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + table.Table( + name="name_value", + granularity=table.Table.TimestampGranularity.MILLIS, + deletion_protection=True, + ) + ) + response = await client.get_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.GetTableRequest() + + +@pytest.mark.asyncio +async def test_get_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_table + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_table + ] = mock_object + + request = {} + await client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio async def test_get_table_async( transport: str = "grpc_asyncio", request_type=bigtable_table_admin.GetTableRequest ): @@ -2211,7 +2784,8 @@ async def test_get_table_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.GetTableRequest() + request = bigtable_table_admin.GetTableRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, table.Table) @@ -2390,7 +2964,8 @@ def test_update_table(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.UpdateTableRequest() + request = bigtable_table_admin.UpdateTableRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2406,12 +2981,149 @@ def test_update_table_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_table() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_table_admin.UpdateTableRequest() +def test_update_table_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.UpdateTableRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_table(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.UpdateTableRequest() + + +def test_update_table_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_table] = mock_rpc + request = {} + client.update_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_table_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_table), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.UpdateTableRequest() + + +@pytest.mark.asyncio +async def test_update_table_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_table + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_table + ] = mock_object + + request = {} + await client.update_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_table_async( transport: str = "grpc_asyncio", @@ -2437,7 +3149,8 @@ async def test_update_table_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.UpdateTableRequest() + request = bigtable_table_admin.UpdateTableRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2627,7 +3340,8 @@ def test_delete_table(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.DeleteTableRequest() + request = bigtable_table_admin.DeleteTableRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2643,12 +3357,143 @@ def test_delete_table_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_table() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_table_admin.DeleteTableRequest() +def test_delete_table_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.DeleteTableRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_table(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.DeleteTableRequest( + name="name_value", + ) + + +def test_delete_table_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_table] = mock_rpc + request = {} + client.delete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_table_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_table), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.DeleteTableRequest() + + +@pytest.mark.asyncio +async def test_delete_table_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_table + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_table + ] = mock_object + + request = {} + await client.delete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_table_async( transport: str = "grpc_asyncio", @@ -2672,7 +3517,8 @@ async def test_delete_table_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.DeleteTableRequest() + request = bigtable_table_admin.DeleteTableRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2848,7 +3694,8 @@ def test_undelete_table(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.UndeleteTableRequest() + request = bigtable_table_admin.UndeleteTableRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2864,39 +3711,181 @@ def test_undelete_table_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.undelete_table() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_table_admin.UndeleteTableRequest() -@pytest.mark.asyncio -async def test_undelete_table_async( - transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.UndeleteTableRequest, -): - client = BigtableTableAdminAsyncClient( +def test_undelete_table_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.UndeleteTableRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_table), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - response = await client.undelete_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + client.undelete_table(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.UndeleteTableRequest( + name="name_value", + ) + + +def test_undelete_table_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.undelete_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.undelete_table] = mock_rpc + request = {} + client.undelete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.undelete_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_undelete_table_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_table), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.undelete_table() + call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable_table_admin.UndeleteTableRequest() + +@pytest.mark.asyncio +async def test_undelete_table_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.undelete_table + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.undelete_table + ] = mock_object + + request = {} + await client.undelete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.undelete_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_undelete_table_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.UndeleteTableRequest, +): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.undelete_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.undelete_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = bigtable_table_admin.UndeleteTableRequest() + assert args[0] == request + # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) @@ -3052,11 +4041,11 @@ async def test_undelete_table_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.ModifyColumnFamiliesRequest, + bigtable_table_admin.CreateAuthorizedViewRequest, dict, ], ) -def test_modify_column_families(request_type, transport: str = "grpc"): +def test_create_authorized_view(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3068,29 +4057,23 @@ def test_modify_column_families(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.modify_column_families), "__call__" + type(client.transport.create_authorized_view), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = table.Table( - name="name_value", - granularity=table.Table.TimestampGranularity.MILLIS, - deletion_protection=True, - ) - response = client.modify_column_families(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.ModifyColumnFamiliesRequest() + request = bigtable_table_admin.CreateAuthorizedViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, table.Table) - assert response.name == "name_value" - assert response.granularity == table.Table.TimestampGranularity.MILLIS - assert response.deletion_protection is True + assert isinstance(response, future.Future) -def test_modify_column_families_empty_call(): +def test_create_authorized_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -3100,18 +4083,170 @@ def test_modify_column_families_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.modify_column_families), "__call__" + type(client.transport.create_authorized_view), "__call__" ) as call: - client.modify_column_families() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_authorized_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.ModifyColumnFamiliesRequest() + assert args[0] == bigtable_table_admin.CreateAuthorizedViewRequest() + + +def test_create_authorized_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.CreateAuthorizedViewRequest( + parent="parent_value", + authorized_view_id="authorized_view_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_authorized_view), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_authorized_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CreateAuthorizedViewRequest( + parent="parent_value", + authorized_view_id="authorized_view_id_value", + ) + + +def test_create_authorized_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_authorized_view + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_authorized_view + ] = mock_rpc + request = {} + client.create_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_authorized_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_modify_column_families_async( +async def test_create_authorized_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_authorized_view), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_authorized_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CreateAuthorizedViewRequest() + + +@pytest.mark.asyncio +async def test_create_authorized_view_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.ModifyColumnFamiliesRequest, +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_authorized_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_authorized_view + ] = mock_object + + request = {} + await client.create_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_authorized_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_authorized_view_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.CreateAuthorizedViewRequest, ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3124,52 +4259,46 @@ async def test_modify_column_families_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.modify_column_families), "__call__" + type(client.transport.create_authorized_view), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - table.Table( - name="name_value", - granularity=table.Table.TimestampGranularity.MILLIS, - deletion_protection=True, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.modify_column_families(request) + response = await client.create_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.ModifyColumnFamiliesRequest() + request = bigtable_table_admin.CreateAuthorizedViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, table.Table) - assert response.name == "name_value" - assert response.granularity == table.Table.TimestampGranularity.MILLIS - assert response.deletion_protection is True + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_modify_column_families_async_from_dict(): - await test_modify_column_families_async(request_type=dict) +async def test_create_authorized_view_async_from_dict(): + await test_create_authorized_view_async(request_type=dict) -def test_modify_column_families_field_headers(): +def test_create_authorized_view_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.ModifyColumnFamiliesRequest() + request = bigtable_table_admin.CreateAuthorizedViewRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.modify_column_families), "__call__" + type(client.transport.create_authorized_view), "__call__" ) as call: - call.return_value = table.Table() - client.modify_column_families(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3180,28 +4309,30 @@ def test_modify_column_families_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_modify_column_families_field_headers_async(): +async def test_create_authorized_view_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.ModifyColumnFamiliesRequest() + request = bigtable_table_admin.CreateAuthorizedViewRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.modify_column_families), "__call__" + type(client.transport.create_authorized_view), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Table()) - await client.modify_column_families(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3212,47 +4343,45 @@ async def test_modify_column_families_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_modify_column_families_flattened(): +def test_create_authorized_view_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.modify_column_families), "__call__" + type(client.transport.create_authorized_view), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = table.Table() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.modify_column_families( - name="name_value", - modifications=[ - bigtable_table_admin.ModifyColumnFamiliesRequest.Modification( - id="id_value" - ) - ], + client.create_authorized_view( + parent="parent_value", + authorized_view=table.AuthorizedView(name="name_value"), + authorized_view_id="authorized_view_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val - arg = args[0].modifications - mock_val = [ - bigtable_table_admin.ModifyColumnFamiliesRequest.Modification(id="id_value") - ] + arg = args[0].authorized_view + mock_val = table.AuthorizedView(name="name_value") + assert arg == mock_val + arg = args[0].authorized_view_id + mock_val = "authorized_view_id_value" assert arg == mock_val -def test_modify_column_families_flattened_error(): +def test_create_authorized_view_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3260,58 +4389,55 @@ def test_modify_column_families_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.modify_column_families( - bigtable_table_admin.ModifyColumnFamiliesRequest(), - name="name_value", - modifications=[ - bigtable_table_admin.ModifyColumnFamiliesRequest.Modification( - id="id_value" - ) - ], + client.create_authorized_view( + bigtable_table_admin.CreateAuthorizedViewRequest(), + parent="parent_value", + authorized_view=table.AuthorizedView(name="name_value"), + authorized_view_id="authorized_view_id_value", ) @pytest.mark.asyncio -async def test_modify_column_families_flattened_async(): +async def test_create_authorized_view_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.modify_column_families), "__call__" + type(client.transport.create_authorized_view), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = table.Table() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Table()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.modify_column_families( - name="name_value", - modifications=[ - bigtable_table_admin.ModifyColumnFamiliesRequest.Modification( - id="id_value" - ) - ], + response = await client.create_authorized_view( + parent="parent_value", + authorized_view=table.AuthorizedView(name="name_value"), + authorized_view_id="authorized_view_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val - arg = args[0].modifications - mock_val = [ - bigtable_table_admin.ModifyColumnFamiliesRequest.Modification(id="id_value") - ] + arg = args[0].authorized_view + mock_val = table.AuthorizedView(name="name_value") + assert arg == mock_val + arg = args[0].authorized_view_id + mock_val = "authorized_view_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_modify_column_families_flattened_error_async(): +async def test_create_authorized_view_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3319,25 +4445,22 @@ async def test_modify_column_families_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.modify_column_families( - bigtable_table_admin.ModifyColumnFamiliesRequest(), - name="name_value", - modifications=[ - bigtable_table_admin.ModifyColumnFamiliesRequest.Modification( - id="id_value" - ) - ], + await client.create_authorized_view( + bigtable_table_admin.CreateAuthorizedViewRequest(), + parent="parent_value", + authorized_view=table.AuthorizedView(name="name_value"), + authorized_view_id="authorized_view_id_value", ) @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.DropRowRangeRequest, + bigtable_table_admin.ListAuthorizedViewsRequest, dict, ], ) -def test_drop_row_range(request_type, transport: str = "grpc"): +def test_list_authorized_views(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3348,21 +4471,27 @@ def test_drop_row_range(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.drop_row_range), "__call__") as call: + with mock.patch.object( + type(client.transport.list_authorized_views), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.drop_row_range(request) + call.return_value = bigtable_table_admin.ListAuthorizedViewsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_authorized_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.DropRowRangeRequest() + request = bigtable_table_admin.ListAuthorizedViewsRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListAuthorizedViewsPager) + assert response.next_page_token == "next_page_token_value" -def test_drop_row_range_empty_call(): +def test_list_authorized_views_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -3371,165 +4500,165 @@ def test_drop_row_range_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
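
Taken together, the create_authorized_view tests above pin down the flattened call shape (parent, authorized_view, authorized_view_id) and a long-running-operation result. Against a live BigtableTableAdminClient the same call would look roughly like the sketch below; the project/instance/table path, the view id, and the empty AuthorizedView payload are placeholders of mine, not values taken from these tests.

    from google.cloud.bigtable_admin_v2 import BigtableTableAdminClient
    from google.cloud.bigtable_admin_v2.types import table

    client = BigtableTableAdminClient()  # assumes Application Default Credentials

    # Flattened form mirroring test_create_authorized_view_flattened; returns an LRO.
    operation = client.create_authorized_view(
        parent="projects/my-project/instances/my-instance/tables/my-table",
        authorized_view=table.AuthorizedView(),
        authorized_view_id="my-view",
    )
    created_view = operation.result()  # block until the operation completes
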
- with mock.patch.object(type(client.transport.drop_row_range), "__call__") as call: - client.drop_row_range() + with mock.patch.object( + type(client.transport.list_authorized_views), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_authorized_views() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.DropRowRangeRequest() + assert args[0] == bigtable_table_admin.ListAuthorizedViewsRequest() -@pytest.mark.asyncio -async def test_drop_row_range_async( - transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.DropRowRangeRequest, -): - client = BigtableTableAdminAsyncClient( +def test_list_authorized_views_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.ListAuthorizedViewsRequest( + parent="parent_value", + page_token="page_token_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.drop_row_range), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.drop_row_range(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + with mock.patch.object( + type(client.transport.list_authorized_views), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_authorized_views(request=request) + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.DropRowRangeRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_drop_row_range_async_from_dict(): - await test_drop_row_range_async(request_type=dict) + assert args[0] == bigtable_table_admin.ListAuthorizedViewsRequest( + parent="parent_value", + page_token="page_token_value", + ) -def test_drop_row_range_field_headers(): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) +def test_list_authorized_views_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = bigtable_table_admin.DropRowRangeRequest() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request.name = "name_value" + # Ensure method has been cached + assert ( + client._transport.list_authorized_views + in client._transport._wrapped_methods + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.drop_row_range), "__call__") as call: - call.return_value = None - client.drop_row_range(request) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_authorized_views + ] = mock_rpc + request = {} + client.list_authorized_views(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + assert mock_rpc.call_count == 1 - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + client.list_authorized_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_drop_row_range_field_headers_async(): +async def test_list_authorized_views_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = bigtable_table_admin.DropRowRangeRequest() - - request.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.drop_row_range), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.drop_row_range(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + with mock.patch.object( + type(client.transport.list_authorized_views), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_table_admin.ListAuthorizedViewsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_authorized_views() + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + assert args[0] == bigtable_table_admin.ListAuthorizedViewsRequest() -@pytest.mark.parametrize( - "request_type", - [ - bigtable_table_admin.GenerateConsistencyTokenRequest, - dict, - ], -) -def test_generate_consistency_token(request_type, transport: str = "grpc"): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +@pytest.mark.asyncio +async def test_list_authorized_views_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_consistency_token), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = bigtable_table_admin.GenerateConsistencyTokenResponse( - consistency_token="consistency_token_value", + # Ensure method has been cached + assert ( + client._client._transport.list_authorized_views + in client._client._transport._wrapped_methods ) - response = client.generate_consistency_token(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.GenerateConsistencyTokenRequest() + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) - # Establish that the response is the type that we expect. - assert isinstance(response, bigtable_table_admin.GenerateConsistencyTokenResponse) - assert response.consistency_token == "consistency_token_value" + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_authorized_views + ] = mock_object + request = {} + await client.list_authorized_views(request) -def test_generate_consistency_token_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.generate_consistency_token), "__call__" - ) as call: - client.generate_consistency_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.GenerateConsistencyTokenRequest() + await client.list_authorized_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 @pytest.mark.asyncio -async def test_generate_consistency_token_async( +async def test_list_authorized_views_async( transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.GenerateConsistencyTokenRequest, + request_type=bigtable_table_admin.ListAuthorizedViewsRequest, ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3542,48 +4671,49 @@ async def test_generate_consistency_token_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_consistency_token), "__call__" + type(client.transport.list_authorized_views), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - bigtable_table_admin.GenerateConsistencyTokenResponse( - consistency_token="consistency_token_value", + bigtable_table_admin.ListAuthorizedViewsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.generate_consistency_token(request) + response = await client.list_authorized_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.GenerateConsistencyTokenRequest() + request = bigtable_table_admin.ListAuthorizedViewsRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, bigtable_table_admin.GenerateConsistencyTokenResponse) - assert response.consistency_token == "consistency_token_value" + assert isinstance(response, pagers.ListAuthorizedViewsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_generate_consistency_token_async_from_dict(): - await test_generate_consistency_token_async(request_type=dict) +async def test_list_authorized_views_async_from_dict(): + await test_list_authorized_views_async(request_type=dict) -def test_generate_consistency_token_field_headers(): +def test_list_authorized_views_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.GenerateConsistencyTokenRequest() + request = bigtable_table_admin.ListAuthorizedViewsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_consistency_token), "__call__" + type(client.transport.list_authorized_views), "__call__" ) as call: - call.return_value = bigtable_table_admin.GenerateConsistencyTokenResponse() - client.generate_consistency_token(request) + call.return_value = bigtable_table_admin.ListAuthorizedViewsResponse() + client.list_authorized_views(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -3594,30 +4724,30 @@ def test_generate_consistency_token_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_generate_consistency_token_field_headers_async(): +async def test_list_authorized_views_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.GenerateConsistencyTokenRequest() + request = bigtable_table_admin.ListAuthorizedViewsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_consistency_token), "__call__" + type(client.transport.list_authorized_views), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - bigtable_table_admin.GenerateConsistencyTokenResponse() + bigtable_table_admin.ListAuthorizedViewsResponse() ) - await client.generate_consistency_token(request) + await client.list_authorized_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3628,37 +4758,37 @@ async def test_generate_consistency_token_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_generate_consistency_token_flattened(): +def test_list_authorized_views_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_consistency_token), "__call__" + type(client.transport.list_authorized_views), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = bigtable_table_admin.GenerateConsistencyTokenResponse() + call.return_value = bigtable_table_admin.ListAuthorizedViewsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.generate_consistency_token( - name="name_value", + client.list_authorized_views( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_generate_consistency_token_flattened_error(): +def test_list_authorized_views_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3666,45 +4796,45 @@ def test_generate_consistency_token_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
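
The request-object and flattened forms checked here are mutually exclusive; in real use only the flattened parent (or only a ListAuthorizedViewsRequest) is passed. A minimal sketch of the flattened call and of iterating the returned pager follows; the table path is a placeholder and the client is constructed the same way as in the earlier sketch.

    from google.cloud.bigtable_admin_v2 import BigtableTableAdminClient

    client = BigtableTableAdminClient()
    pager = client.list_authorized_views(
        parent="projects/my-project/instances/my-instance/tables/my-table",
    )
    # The pager lazily pulls subsequent pages, as the pager tests further below
    # exercise with mocked ListAuthorizedViewsResponse pages.
    for authorized_view in pager:
        print(authorized_view.name)
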
with pytest.raises(ValueError): - client.generate_consistency_token( - bigtable_table_admin.GenerateConsistencyTokenRequest(), - name="name_value", + client.list_authorized_views( + bigtable_table_admin.ListAuthorizedViewsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_generate_consistency_token_flattened_async(): +async def test_list_authorized_views_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_consistency_token), "__call__" + type(client.transport.list_authorized_views), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = bigtable_table_admin.GenerateConsistencyTokenResponse() + call.return_value = bigtable_table_admin.ListAuthorizedViewsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - bigtable_table_admin.GenerateConsistencyTokenResponse() + bigtable_table_admin.ListAuthorizedViewsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.generate_consistency_token( - name="name_value", + response = await client.list_authorized_views( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_generate_consistency_token_flattened_error_async(): +async def test_list_authorized_views_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3712,20 +4842,218 @@ async def test_generate_consistency_token_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.generate_consistency_token( - bigtable_table_admin.GenerateConsistencyTokenRequest(), - name="name_value", + await client.list_authorized_views( + bigtable_table_admin.ListAuthorizedViewsRequest(), + parent="parent_value", + ) + + +def test_list_authorized_views_pager(transport_name: str = "grpc"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authorized_views), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + table.AuthorizedView(), + table.AuthorizedView(), + ], + next_page_token="abc", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[], + next_page_token="def", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + ], + next_page_token="ghi", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + table.AuthorizedView(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_authorized_views(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, table.AuthorizedView) for i in results) + + +def test_list_authorized_views_pages(transport_name: str = "grpc"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authorized_views), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + table.AuthorizedView(), + table.AuthorizedView(), + ], + next_page_token="abc", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[], + next_page_token="def", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + ], + next_page_token="ghi", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + table.AuthorizedView(), + ], + ), + RuntimeError, + ) + pages = list(client.list_authorized_views(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_authorized_views_async_pager(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authorized_views), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + table.AuthorizedView(), + table.AuthorizedView(), + ], + next_page_token="abc", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[], + next_page_token="def", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + ], + next_page_token="ghi", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + table.AuthorizedView(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_authorized_views( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, table.AuthorizedView) for i in responses) + + +@pytest.mark.asyncio +async def test_list_authorized_views_async_pages(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authorized_views), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + table.AuthorizedView(), + table.AuthorizedView(), + ], + next_page_token="abc", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[], + next_page_token="def", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + ], + next_page_token="ghi", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + table.AuthorizedView(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_authorized_views(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.CheckConsistencyRequest, + bigtable_table_admin.GetAuthorizedViewRequest, dict, ], ) -def test_check_consistency(request_type, transport: str = "grpc"): +def test_get_authorized_view(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3737,25 +5065,30 @@ def test_check_consistency(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.check_consistency), "__call__" + type(client.transport.get_authorized_view), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = bigtable_table_admin.CheckConsistencyResponse( - consistent=True, + call.return_value = table.AuthorizedView( + name="name_value", + etag="etag_value", + deletion_protection=True, ) - response = client.check_consistency(request) + response = client.get_authorized_view(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CheckConsistencyRequest() + request = bigtable_table_admin.GetAuthorizedViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, bigtable_table_admin.CheckConsistencyResponse) - assert response.consistent is True + assert isinstance(response, table.AuthorizedView) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.deletion_protection is True -def test_check_consistency_empty_call(): +def test_get_authorized_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -3765,18 +5098,163 @@ def test_check_consistency_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.check_consistency), "__call__" + type(client.transport.get_authorized_view), "__call__" ) as call: - client.check_consistency() + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_authorized_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CheckConsistencyRequest() + assert args[0] == bigtable_table_admin.GetAuthorizedViewRequest() + + +def test_get_authorized_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.GetAuthorizedViewRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authorized_view), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_authorized_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.GetAuthorizedViewRequest( + name="name_value", + ) + + +def test_get_authorized_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_authorized_view in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_authorized_view + ] = mock_rpc + request = {} + client.get_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_authorized_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_check_consistency_async( +async def test_get_authorized_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authorized_view), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + table.AuthorizedView( + name="name_value", + etag="etag_value", + deletion_protection=True, + ) + ) + response = await client.get_authorized_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.GetAuthorizedViewRequest() + + +@pytest.mark.asyncio +async def test_get_authorized_view_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.CheckConsistencyRequest, +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_authorized_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_authorized_view + ] = mock_object + + request = {} + await client.get_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_authorized_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_authorized_view_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.GetAuthorizedViewRequest, ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3789,48 +5267,53 @@ async def test_check_consistency_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.check_consistency), "__call__" + type(client.transport.get_authorized_view), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - bigtable_table_admin.CheckConsistencyResponse( - consistent=True, + table.AuthorizedView( + name="name_value", + etag="etag_value", + deletion_protection=True, ) ) - response = await client.check_consistency(request) + response = await client.get_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CheckConsistencyRequest() + request = bigtable_table_admin.GetAuthorizedViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, bigtable_table_admin.CheckConsistencyResponse) - assert response.consistent is True + assert isinstance(response, table.AuthorizedView) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.deletion_protection is True @pytest.mark.asyncio -async def test_check_consistency_async_from_dict(): - await test_check_consistency_async(request_type=dict) +async def test_get_authorized_view_async_from_dict(): + await test_get_authorized_view_async(request_type=dict) -def test_check_consistency_field_headers(): +def test_get_authorized_view_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.CheckConsistencyRequest() + request = bigtable_table_admin.GetAuthorizedViewRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.check_consistency), "__call__" + type(client.transport.get_authorized_view), "__call__" ) as call: - call.return_value = bigtable_table_admin.CheckConsistencyResponse() - client.check_consistency(request) + call.return_value = table.AuthorizedView() + client.get_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3846,25 +5329,25 @@ def test_check_consistency_field_headers(): @pytest.mark.asyncio -async def test_check_consistency_field_headers_async(): +async def test_get_authorized_view_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.CheckConsistencyRequest() + request = bigtable_table_admin.GetAuthorizedViewRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.check_consistency), "__call__" + type(client.transport.get_authorized_view), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - bigtable_table_admin.CheckConsistencyResponse() + table.AuthorizedView() ) - await client.check_consistency(request) + await client.get_authorized_view(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -3879,22 +5362,21 @@ async def test_check_consistency_field_headers_async(): ) in kw["metadata"] -def test_check_consistency_flattened(): +def test_get_authorized_view_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.check_consistency), "__call__" + type(client.transport.get_authorized_view), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = bigtable_table_admin.CheckConsistencyResponse() + call.return_value = table.AuthorizedView() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.check_consistency( + client.get_authorized_view( name="name_value", - consistency_token="consistency_token_value", ) # Establish that the underlying call was made with the expected @@ -3904,12 +5386,9 @@ def test_check_consistency_flattened(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].consistency_token - mock_val = "consistency_token_value" - assert arg == mock_val -def test_check_consistency_flattened_error(): +def test_get_authorized_view_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3917,34 +5396,32 @@ def test_check_consistency_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.check_consistency( - bigtable_table_admin.CheckConsistencyRequest(), + client.get_authorized_view( + bigtable_table_admin.GetAuthorizedViewRequest(), name="name_value", - consistency_token="consistency_token_value", ) @pytest.mark.asyncio -async def test_check_consistency_flattened_async(): +async def test_get_authorized_view_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.check_consistency), "__call__" + type(client.transport.get_authorized_view), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = bigtable_table_admin.CheckConsistencyResponse() + call.return_value = table.AuthorizedView() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - bigtable_table_admin.CheckConsistencyResponse() + table.AuthorizedView() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.check_consistency( + response = await client.get_authorized_view( name="name_value", - consistency_token="consistency_token_value", ) # Establish that the underlying call was made with the expected @@ -3954,13 +5431,10 @@ async def test_check_consistency_flattened_async(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].consistency_token - mock_val = "consistency_token_value" - assert arg == mock_val @pytest.mark.asyncio -async def test_check_consistency_flattened_error_async(): +async def test_get_authorized_view_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3968,21 +5442,20 @@ async def test_check_consistency_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
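
The same request-versus-flattened exclusivity applies to get_authorized_view: only the flattened name (or only a GetAuthorizedViewRequest) is passed in practice. A rough sketch under that assumption; the authorized-view resource name below is a placeholder I supplied, not one from these tests.

    from google.cloud.bigtable_admin_v2 import BigtableTableAdminClient

    client = BigtableTableAdminClient()
    view = client.get_authorized_view(
        name="projects/my-project/instances/my-instance/tables/my-table/"
        "authorizedViews/my-view",
    )
    # Fields asserted in the tests above: name, etag, deletion_protection.
    print(view.name, view.etag, view.deletion_protection)
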
with pytest.raises(ValueError): - await client.check_consistency( - bigtable_table_admin.CheckConsistencyRequest(), + await client.get_authorized_view( + bigtable_table_admin.GetAuthorizedViewRequest(), name="name_value", - consistency_token="consistency_token_value", ) @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.SnapshotTableRequest, + bigtable_table_admin.UpdateAuthorizedViewRequest, dict, ], ) -def test_snapshot_table(request_type, transport: str = "grpc"): +def test_update_authorized_view(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3993,21 +5466,24 @@ def test_snapshot_table(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: + with mock.patch.object( + type(client.transport.update_authorized_view), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.snapshot_table(request) + response = client.update_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.SnapshotTableRequest() + request = bigtable_table_admin.UpdateAuthorizedViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_snapshot_table_empty_call(): +def test_update_authorized_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -4016,17 +5492,165 @@ def test_snapshot_table_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: - client.snapshot_table() + with mock.patch.object( + type(client.transport.update_authorized_view), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_authorized_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.SnapshotTableRequest() + assert args[0] == bigtable_table_admin.UpdateAuthorizedViewRequest() + + +def test_update_authorized_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.UpdateAuthorizedViewRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authorized_view), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_authorized_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.UpdateAuthorizedViewRequest() + + +def test_update_authorized_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_authorized_view + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_authorized_view + ] = mock_rpc + request = {} + client.update_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_authorized_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_snapshot_table_async( +async def test_update_authorized_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authorized_view), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_authorized_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.UpdateAuthorizedViewRequest() + + +@pytest.mark.asyncio +async def test_update_authorized_view_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.SnapshotTableRequest, +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_authorized_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_authorized_view + ] = mock_object + + request = {} + await client.update_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_authorized_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_authorized_view_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.UpdateAuthorizedViewRequest, ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4038,42 +5662,47 @@ async def test_snapshot_table_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: + with mock.patch.object( + type(client.transport.update_authorized_view), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.snapshot_table(request) + response = await client.update_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.SnapshotTableRequest() + request = bigtable_table_admin.UpdateAuthorizedViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_snapshot_table_async_from_dict(): - await test_snapshot_table_async(request_type=dict) +async def test_update_authorized_view_async_from_dict(): + await test_update_authorized_view_async(request_type=dict) -def test_snapshot_table_field_headers(): +def test_update_authorized_view_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.SnapshotTableRequest() + request = bigtable_table_admin.UpdateAuthorizedViewRequest() - request.name = "name_value" + request.authorized_view.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: + with mock.patch.object( + type(client.transport.update_authorized_view), "__call__" + ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.snapshot_table(request) + client.update_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4084,28 +5713,30 @@ def test_snapshot_table_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "authorized_view.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_snapshot_table_field_headers_async(): +async def test_update_authorized_view_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.SnapshotTableRequest() + request = bigtable_table_admin.UpdateAuthorizedViewRequest() - request.name = "name_value" + request.authorized_view.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: + with mock.patch.object( + type(client.transport.update_authorized_view), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.snapshot_table(request) + await client.update_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4116,47 +5747,41 @@ async def test_snapshot_table_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "authorized_view.name=name_value", ) in kw["metadata"] -def test_snapshot_table_flattened(): +def test_update_authorized_view_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: + with mock.patch.object( + type(client.transport.update_authorized_view), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.snapshot_table( - name="name_value", - cluster="cluster_value", - snapshot_id="snapshot_id_value", - description="description_value", + client.update_authorized_view( + authorized_view=table.AuthorizedView(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].cluster - mock_val = "cluster_value" + arg = args[0].authorized_view + mock_val = table.AuthorizedView(name="name_value") assert arg == mock_val - arg = args[0].snapshot_id - mock_val = "snapshot_id_value" - assert arg == mock_val - arg = args[0].description - mock_val = "description_value" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_snapshot_table_flattened_error(): +def test_update_authorized_view_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4164,23 +5789,23 @@ def test_snapshot_table_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.snapshot_table( - bigtable_table_admin.SnapshotTableRequest(), - name="name_value", - cluster="cluster_value", - snapshot_id="snapshot_id_value", - description="description_value", + client.update_authorized_view( + bigtable_table_admin.UpdateAuthorizedViewRequest(), + authorized_view=table.AuthorizedView(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_snapshot_table_flattened_async(): +async def test_update_authorized_view_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: + with mock.patch.object( + type(client.transport.update_authorized_view), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -4189,33 +5814,25 @@ async def test_snapshot_table_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.snapshot_table( - name="name_value", - cluster="cluster_value", - snapshot_id="snapshot_id_value", - description="description_value", + response = await client.update_authorized_view( + authorized_view=table.AuthorizedView(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - arg = args[0].cluster - mock_val = "cluster_value" - assert arg == mock_val - arg = args[0].snapshot_id - mock_val = "snapshot_id_value" + arg = args[0].authorized_view + mock_val = table.AuthorizedView(name="name_value") assert arg == mock_val - arg = args[0].description - mock_val = "description_value" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_snapshot_table_flattened_error_async(): +async def test_update_authorized_view_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4223,23 +5840,21 @@ async def test_snapshot_table_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.snapshot_table( - bigtable_table_admin.SnapshotTableRequest(), - name="name_value", - cluster="cluster_value", - snapshot_id="snapshot_id_value", - description="description_value", + await client.update_authorized_view( + bigtable_table_admin.UpdateAuthorizedViewRequest(), + authorized_view=table.AuthorizedView(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.GetSnapshotRequest, + bigtable_table_admin.DeleteAuthorizedViewRequest, dict, ], ) -def test_get_snapshot(request_type, transport: str = "grpc"): +def test_delete_authorized_view(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4250,30 +5865,24 @@ def test_get_snapshot(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_authorized_view), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = table.Snapshot( - name="name_value", - data_size_bytes=1594, - state=table.Snapshot.State.READY, - description="description_value", - ) - response = client.get_snapshot(request) + call.return_value = None + response = client.delete_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.GetSnapshotRequest() + request = bigtable_table_admin.DeleteAuthorizedViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, table.Snapshot) - assert response.name == "name_value" - assert response.data_size_bytes == 1594 - assert response.state == table.Snapshot.State.READY - assert response.description == "description_value" + assert response is None -def test_get_snapshot_empty_call(): +def test_delete_authorized_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -4282,73 +5891,211 @@ def test_get_snapshot_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: - client.get_snapshot() + with mock.patch.object( + type(client.transport.delete_authorized_view), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_authorized_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.GetSnapshotRequest() + assert args[0] == bigtable_table_admin.DeleteAuthorizedViewRequest() -@pytest.mark.asyncio -async def test_get_snapshot_async( - transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.GetSnapshotRequest, -): - client = BigtableTableAdminAsyncClient( +def test_delete_authorized_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.DeleteAuthorizedViewRequest( + name="name_value", + etag="etag_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - table.Snapshot( - name="name_value", - data_size_bytes=1594, - state=table.Snapshot.State.READY, - description="description_value", - ) + with mock.patch.object( + type(client.transport.delete_authorized_view), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - response = await client.get_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + client.delete_authorized_view(request=request) + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.GetSnapshotRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, table.Snapshot) - assert response.name == "name_value" - assert response.data_size_bytes == 1594 - assert response.state == table.Snapshot.State.READY - assert response.description == "description_value" + assert args[0] == bigtable_table_admin.DeleteAuthorizedViewRequest( + name="name_value", + etag="etag_value", + ) + + +def test_delete_authorized_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_authorized_view + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_authorized_view + ] = mock_rpc + request = {} + client.delete_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_authorized_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_snapshot_async_from_dict(): - await test_get_snapshot_async(request_type=dict) +async def test_delete_authorized_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authorized_view), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_authorized_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.DeleteAuthorizedViewRequest() -def test_get_snapshot_field_headers(): +@pytest.mark.asyncio +async def test_delete_authorized_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_authorized_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_authorized_view + ] = mock_object + + request = {} + await client.delete_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_authorized_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_authorized_view_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.DeleteAuthorizedViewRequest, +): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authorized_view), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = bigtable_table_admin.DeleteAuthorizedViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_authorized_view_async_from_dict(): + await test_delete_authorized_view_async(request_type=dict) + + +def test_delete_authorized_view_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.GetSnapshotRequest() + request = bigtable_table_admin.DeleteAuthorizedViewRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: - call.return_value = table.Snapshot() - client.get_snapshot(request) + with mock.patch.object( + type(client.transport.delete_authorized_view), "__call__" + ) as call: + call.return_value = None + client.delete_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4364,21 +6111,23 @@ def test_get_snapshot_field_headers(): @pytest.mark.asyncio -async def test_get_snapshot_field_headers_async(): +async def test_delete_authorized_view_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.GetSnapshotRequest() + request = bigtable_table_admin.DeleteAuthorizedViewRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Snapshot()) - await client.get_snapshot(request) + with mock.patch.object( + type(client.transport.delete_authorized_view), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_authorized_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4393,18 +6142,20 @@ async def test_get_snapshot_field_headers_async(): ) in kw["metadata"] -def test_get_snapshot_flattened(): +def test_delete_authorized_view_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_authorized_view), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = table.Snapshot() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_snapshot( + client.delete_authorized_view( name="name_value", ) @@ -4417,7 +6168,7 @@ def test_get_snapshot_flattened(): assert arg == mock_val -def test_get_snapshot_flattened_error(): +def test_delete_authorized_view_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4425,27 +6176,29 @@ def test_get_snapshot_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_snapshot( - bigtable_table_admin.GetSnapshotRequest(), + client.delete_authorized_view( + bigtable_table_admin.DeleteAuthorizedViewRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_snapshot_flattened_async(): +async def test_delete_authorized_view_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.delete_authorized_view), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = table.Snapshot() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Snapshot()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_snapshot( + response = await client.delete_authorized_view( name="name_value", ) @@ -4459,7 +6212,7 @@ async def test_get_snapshot_flattened_async(): @pytest.mark.asyncio -async def test_get_snapshot_flattened_error_async(): +async def test_delete_authorized_view_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4467,8 +6220,8 @@ async def test_get_snapshot_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_snapshot( - bigtable_table_admin.GetSnapshotRequest(), + await client.delete_authorized_view( + bigtable_table_admin.DeleteAuthorizedViewRequest(), name="name_value", ) @@ -4476,11 +6229,11 @@ async def test_get_snapshot_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.ListSnapshotsRequest, + bigtable_table_admin.ModifyColumnFamiliesRequest, dict, ], ) -def test_list_snapshots(request_type, transport: str = "grpc"): +def test_modify_column_families(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4491,24 +6244,31 @@ def test_list_snapshots(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + with mock.patch.object( + type(client.transport.modify_column_families), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = bigtable_table_admin.ListSnapshotsResponse( - next_page_token="next_page_token_value", + call.return_value = table.Table( + name="name_value", + granularity=table.Table.TimestampGranularity.MILLIS, + deletion_protection=True, ) - response = client.list_snapshots(request) + response = client.modify_column_families(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.ListSnapshotsRequest() + request = bigtable_table_admin.ModifyColumnFamiliesRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSnapshotsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, table.Table) + assert response.name == "name_value" + assert response.granularity == table.Table.TimestampGranularity.MILLIS + assert response.deletion_protection is True -def test_list_snapshots_empty_call(): +def test_modify_column_families_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -4517,138 +6277,307 @@ def test_list_snapshots_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: - client.list_snapshots() + with mock.patch.object( + type(client.transport.modify_column_families), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.modify_column_families() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.ListSnapshotsRequest() + assert args[0] == bigtable_table_admin.ModifyColumnFamiliesRequest() -@pytest.mark.asyncio -async def test_list_snapshots_async( - transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.ListSnapshotsRequest, -): - client = BigtableTableAdminAsyncClient( +def test_modify_column_families_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.ModifyColumnFamiliesRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - bigtable_table_admin.ListSnapshotsResponse( - next_page_token="next_page_token_value", - ) + with mock.patch.object( + type(client.transport.modify_column_families), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - response = await client.list_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + client.modify_column_families(request=request) + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.ListSnapshotsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSnapshotsAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_snapshots_async_from_dict(): - await test_list_snapshots_async(request_type=dict) + assert args[0] == bigtable_table_admin.ModifyColumnFamiliesRequest( + name="name_value", + ) -def test_list_snapshots_field_headers(): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) +def test_modify_column_families_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = bigtable_table_admin.ListSnapshotsRequest() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request.parent = "parent_value" + # Ensure method has been cached + assert ( + client._transport.modify_column_families + in client._transport._wrapped_methods + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: - call.return_value = bigtable_table_admin.ListSnapshotsResponse() - client.list_snapshots(request) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.modify_column_families + ] = mock_rpc + request = {} + client.modify_column_families(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + assert mock_rpc.call_count == 1 - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + client.modify_column_families(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_snapshots_field_headers_async(): +async def test_modify_column_families_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = bigtable_table_admin.ListSnapshotsRequest() - - request.parent = "parent_value" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + with mock.patch.object( + type(client.transport.modify_column_families), "__call__" + ) as call: + # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - bigtable_table_admin.ListSnapshotsResponse() + table.Table( + name="name_value", + granularity=table.Table.TimestampGranularity.MILLIS, + deletion_protection=True, + ) ) - await client.list_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + response = await client.modify_column_families() + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == bigtable_table_admin.ModifyColumnFamiliesRequest() - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", + +@pytest.mark.asyncio +async def test_modify_column_families_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.modify_column_families + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.modify_column_families + ] = mock_object + + request = {} + await client.modify_column_families(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.modify_column_families(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_modify_column_families_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.ModifyColumnFamiliesRequest, +): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_column_families), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + table.Table( + name="name_value", + granularity=table.Table.TimestampGranularity.MILLIS, + deletion_protection=True, + ) + ) + response = await client.modify_column_families(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = bigtable_table_admin.ModifyColumnFamiliesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, table.Table) + assert response.name == "name_value" + assert response.granularity == table.Table.TimestampGranularity.MILLIS + assert response.deletion_protection is True + + +@pytest.mark.asyncio +async def test_modify_column_families_async_from_dict(): + await test_modify_column_families_async(request_type=dict) + + +def test_modify_column_families_field_headers(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable_table_admin.ModifyColumnFamiliesRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.modify_column_families), "__call__" + ) as call: + call.return_value = table.Table() + client.modify_column_families(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", ) in kw["metadata"] -def test_list_snapshots_flattened(): +@pytest.mark.asyncio +async def test_modify_column_families_field_headers_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable_table_admin.ModifyColumnFamiliesRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_column_families), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Table()) + await client.modify_column_families(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_modify_column_families_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + with mock.patch.object( + type(client.transport.modify_column_families), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = bigtable_table_admin.ListSnapshotsResponse() + call.return_value = table.Table() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_snapshots( - parent="parent_value", + client.modify_column_families( + name="name_value", + modifications=[ + bigtable_table_admin.ModifyColumnFamiliesRequest.Modification( + id="id_value" + ) + ], ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].modifications + mock_val = [ + bigtable_table_admin.ModifyColumnFamiliesRequest.Modification(id="id_value") + ] assert arg == mock_val -def test_list_snapshots_flattened_error(): +def test_modify_column_families_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4656,43 +6585,58 @@ def test_list_snapshots_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_snapshots( - bigtable_table_admin.ListSnapshotsRequest(), - parent="parent_value", + client.modify_column_families( + bigtable_table_admin.ModifyColumnFamiliesRequest(), + name="name_value", + modifications=[ + bigtable_table_admin.ModifyColumnFamiliesRequest.Modification( + id="id_value" + ) + ], ) @pytest.mark.asyncio -async def test_list_snapshots_flattened_async(): +async def test_modify_column_families_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + with mock.patch.object( + type(client.transport.modify_column_families), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = bigtable_table_admin.ListSnapshotsResponse() + call.return_value = table.Table() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - bigtable_table_admin.ListSnapshotsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Table()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_snapshots( - parent="parent_value", + response = await client.modify_column_families( + name="name_value", + modifications=[ + bigtable_table_admin.ModifyColumnFamiliesRequest.Modification( + id="id_value" + ) + ], ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].modifications + mock_val = [ + bigtable_table_admin.ModifyColumnFamiliesRequest.Modification(id="id_value") + ] assert arg == mock_val @pytest.mark.asyncio -async def test_list_snapshots_flattened_error_async(): +async def test_modify_column_families_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4700,254 +6644,201 @@ async def test_list_snapshots_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_snapshots( - bigtable_table_admin.ListSnapshotsRequest(), - parent="parent_value", + await client.modify_column_families( + bigtable_table_admin.ModifyColumnFamiliesRequest(), + name="name_value", + modifications=[ + bigtable_table_admin.ModifyColumnFamiliesRequest.Modification( + id="id_value" + ) + ], ) -def test_list_snapshots_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + bigtable_table_admin.DropRowRangeRequest, + dict, + ], +) +def test_drop_row_range(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[ - table.Snapshot(), - table.Snapshot(), - table.Snapshot(), - ], - next_page_token="abc", - ), - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[], - next_page_token="def", - ), - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[ - table.Snapshot(), - ], - next_page_token="ghi", - ), - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[ - table.Snapshot(), - table.Snapshot(), - ], - ), - RuntimeError, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_snapshots(request={}) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.drop_row_range), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.drop_row_range(request) - assert pager._metadata == metadata + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = bigtable_table_admin.DropRowRangeRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, table.Snapshot) for i in results) + # Establish that the response is the type that we expect. + assert response is None -def test_list_snapshots_pages(transport_name: str = "grpc"): +def test_drop_row_range_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[ - table.Snapshot(), - table.Snapshot(), - table.Snapshot(), - ], - next_page_token="abc", - ), - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[], - next_page_token="def", - ), - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[ - table.Snapshot(), - ], - next_page_token="ghi", - ), - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[ - table.Snapshot(), - table.Snapshot(), - ], - ), - RuntimeError, + with mock.patch.object(type(client.transport.drop_row_range), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_snapshots(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.drop_row_range() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.DropRowRangeRequest() -@pytest.mark.asyncio -async def test_list_snapshots_async_pager(): - client = BigtableTableAdminAsyncClient( +def test_drop_row_range_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.DropRowRangeRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[ - table.Snapshot(), - table.Snapshot(), - table.Snapshot(), - ], - next_page_token="abc", - ), - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[], - next_page_token="def", - ), - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[ - table.Snapshot(), - ], - next_page_token="ghi", - ), - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[ - table.Snapshot(), - table.Snapshot(), - ], - ), - RuntimeError, + with mock.patch.object(type(client.transport.drop_row_range), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - async_pager = await client.list_snapshots( - request={}, + client.drop_row_range(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.DropRowRangeRequest( + name="name_value", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, table.Snapshot) for i in responses) - -@pytest.mark.asyncio -async def test_list_snapshots_async_pages(): - client = BigtableTableAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[ - table.Snapshot(), - table.Snapshot(), - table.Snapshot(), - ], - next_page_token="abc", - ), - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[], - next_page_token="def", - ), - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[ - table.Snapshot(), - ], - next_page_token="ghi", - ), - bigtable_table_admin.ListSnapshotsResponse( - snapshots=[ - table.Snapshot(), - table.Snapshot(), - ], - ), - RuntimeError, +def test_drop_row_range_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_snapshots(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize( - "request_type", - [ - bigtable_table_admin.DeleteSnapshotRequest, - dict, - ], -) -def test_delete_snapshot(request_type, transport: str = "grpc"): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Ensure method has been cached + assert client._transport.drop_row_range in client._transport._wrapped_methods - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_snapshot(request) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.drop_row_range] = mock_rpc + request = {} + client.drop_row_range(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.DeleteSnapshotRequest() + assert mock_rpc.call_count == 1 - # Establish that the response is the type that we expect. - assert response is None + client.drop_row_range(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_delete_snapshot_empty_call(): + +@pytest.mark.asyncio +async def test_drop_row_range_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
- client = BigtableTableAdminClient( + client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: - client.delete_snapshot() + with mock.patch.object(type(client.transport.drop_row_range), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.drop_row_range() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.DeleteSnapshotRequest() + assert args[0] == bigtable_table_admin.DropRowRangeRequest() @pytest.mark.asyncio -async def test_delete_snapshot_async( +async def test_drop_row_range_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.DeleteSnapshotRequest, +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.drop_row_range + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.drop_row_range + ] = mock_object + + request = {} + await client.drop_row_range(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.drop_row_range(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_drop_row_range_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.DropRowRangeRequest, ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4959,40 +6850,41 @@ async def test_delete_snapshot_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.drop_row_range), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_snapshot(request) + response = await client.drop_row_range(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.DeleteSnapshotRequest() + request = bigtable_table_admin.DropRowRangeRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert response is None @pytest.mark.asyncio -async def test_delete_snapshot_async_from_dict(): - await test_delete_snapshot_async(request_type=dict) +async def test_drop_row_range_async_from_dict(): + await test_drop_row_range_async(request_type=dict) -def test_delete_snapshot_field_headers(): +def test_drop_row_range_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.DeleteSnapshotRequest() + request = bigtable_table_admin.DropRowRangeRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.drop_row_range), "__call__") as call: call.return_value = None - client.delete_snapshot(request) + client.drop_row_range(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5008,21 +6900,21 @@ def test_delete_snapshot_field_headers(): @pytest.mark.asyncio -async def test_delete_snapshot_field_headers_async(): +async def test_drop_row_range_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.DeleteSnapshotRequest() + request = bigtable_table_admin.DropRowRangeRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.drop_row_range), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_snapshot(request) + await client.drop_row_range(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5037,138 +6929,210 @@ async def test_delete_snapshot_field_headers_async(): ) in kw["metadata"] -def test_delete_snapshot_flattened(): - client = BigtableTableAdminClient( +@pytest.mark.parametrize( + "request_type", + [ + bigtable_table_admin.GenerateConsistencyTokenRequest, + dict, + ], +) +def test_generate_consistency_token(request_type, transport: str = "grpc"): + client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_consistency_token), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_snapshot( - name="name_value", + call.return_value = bigtable_table_admin.GenerateConsistencyTokenResponse( + consistency_token="consistency_token_value", ) + response = client.generate_consistency_token(request) - # Establish that the underlying call was made with the expected - # request object values. 
+ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + request = bigtable_table_admin.GenerateConsistencyTokenRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, bigtable_table_admin.GenerateConsistencyTokenResponse) + assert response.consistency_token == "consistency_token_value" -def test_delete_snapshot_flattened_error(): +def test_generate_consistency_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_snapshot( - bigtable_table_admin.DeleteSnapshotRequest(), - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_consistency_token), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client.generate_consistency_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.GenerateConsistencyTokenRequest() -@pytest.mark.asyncio -async def test_delete_snapshot_flattened_async(): - client = BigtableTableAdminAsyncClient( +def test_generate_consistency_token_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.GenerateConsistencyTokenRequest( + name="name_value", + ) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_snapshot( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_consistency_token), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) + client.generate_consistency_token(request=request) + call.assert_called() _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_snapshot_flattened_error_async(): - client = BigtableTableAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_snapshot( - bigtable_table_admin.DeleteSnapshotRequest(), + assert args[0] == bigtable_table_admin.GenerateConsistencyTokenRequest( name="name_value", ) -@pytest.mark.parametrize( - "request_type", - [ - bigtable_table_admin.CreateBackupRequest, - dict, - ], -) -def test_create_backup(request_type, transport: str = "grpc"): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_generate_consistency_token_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_backup(request) + # Ensure method has been cached + assert ( + client._transport.generate_consistency_token + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_consistency_token + ] = mock_rpc + request = {} + client.generate_consistency_token(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CreateBackupRequest() + assert mock_rpc.call_count == 1 - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + client.generate_consistency_token(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_create_backup_empty_call(): + +@pytest.mark.asyncio +async def test_generate_consistency_token_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = BigtableTableAdminClient( + client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_backup), "__call__") as call: - client.create_backup() + with mock.patch.object( + type(client.transport.generate_consistency_token), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_table_admin.GenerateConsistencyTokenResponse( + consistency_token="consistency_token_value", + ) + ) + response = await client.generate_consistency_token() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CreateBackupRequest() + assert args[0] == bigtable_table_admin.GenerateConsistencyTokenRequest() @pytest.mark.asyncio -async def test_create_backup_async( +async def test_generate_consistency_token_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.CreateBackupRequest, +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.generate_consistency_token + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.generate_consistency_token + ] = mock_object + + request = {} + await client.generate_consistency_token(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.generate_consistency_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_consistency_token_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.GenerateConsistencyTokenRequest, ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5180,42 +7144,50 @@ async def test_create_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_consistency_token), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + bigtable_table_admin.GenerateConsistencyTokenResponse( + consistency_token="consistency_token_value", + ) ) - response = await client.create_backup(request) + response = await client.generate_consistency_token(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CreateBackupRequest() + request = bigtable_table_admin.GenerateConsistencyTokenRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, bigtable_table_admin.GenerateConsistencyTokenResponse) + assert response.consistency_token == "consistency_token_value" @pytest.mark.asyncio -async def test_create_backup_async_from_dict(): - await test_create_backup_async(request_type=dict) +async def test_generate_consistency_token_async_from_dict(): + await test_generate_consistency_token_async(request_type=dict) -def test_create_backup_field_headers(): +def test_generate_consistency_token_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.CreateBackupRequest() + request = bigtable_table_admin.GenerateConsistencyTokenRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_backup(request) + with mock.patch.object( + type(client.transport.generate_consistency_token), "__call__" + ) as call: + call.return_value = bigtable_table_admin.GenerateConsistencyTokenResponse() + client.generate_consistency_token(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5226,28 +7198,30 @@ def test_create_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_backup_field_headers_async(): +async def test_generate_consistency_token_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.CreateBackupRequest() + request = bigtable_table_admin.GenerateConsistencyTokenRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_consistency_token), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + bigtable_table_admin.GenerateConsistencyTokenResponse() ) - await client.create_backup(request) + await client.generate_consistency_token(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5258,43 +7232,37 @@ async def test_create_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_backup_flattened(): +def test_generate_consistency_token_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_consistency_token), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = bigtable_table_admin.GenerateConsistencyTokenResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_backup( - parent="parent_value", - backup_id="backup_id_value", - backup=table.Backup(name="name_value"), + client.generate_consistency_token( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup_id - mock_val = "backup_id_value" - assert arg == mock_val - arg = args[0].backup - mock_val = table.Backup(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_backup_flattened_error(): +def test_generate_consistency_token_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5302,53 +7270,45 @@ def test_create_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_backup( - bigtable_table_admin.CreateBackupRequest(), - parent="parent_value", - backup_id="backup_id_value", - backup=table.Backup(name="name_value"), + client.generate_consistency_token( + bigtable_table_admin.GenerateConsistencyTokenRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_backup_flattened_async(): +async def test_generate_consistency_token_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_consistency_token), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = bigtable_table_admin.GenerateConsistencyTokenResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + bigtable_table_admin.GenerateConsistencyTokenResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_backup( - parent="parent_value", - backup_id="backup_id_value", - backup=table.Backup(name="name_value"), + response = await client.generate_consistency_token( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup_id - mock_val = "backup_id_value" - assert arg == mock_val - arg = args[0].backup - mock_val = table.Backup(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_backup_flattened_error_async(): +async def test_generate_consistency_token_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5356,22 +7316,20 @@ async def test_create_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_backup( - bigtable_table_admin.CreateBackupRequest(), - parent="parent_value", - backup_id="backup_id_value", - backup=table.Backup(name="name_value"), + await client.generate_consistency_token( + bigtable_table_admin.GenerateConsistencyTokenRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.GetBackupRequest, + bigtable_table_admin.CheckConsistencyRequest, dict, ], ) -def test_get_backup(request_type, transport: str = "grpc"): +def test_check_consistency(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5382,32 +7340,27 @@ def test_get_backup(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.check_consistency), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = table.Backup( - name="name_value", - source_table="source_table_value", - source_backup="source_backup_value", - size_bytes=1089, - state=table.Backup.State.CREATING, + call.return_value = bigtable_table_admin.CheckConsistencyResponse( + consistent=True, ) - response = client.get_backup(request) + response = client.check_consistency(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.GetBackupRequest() + request = bigtable_table_admin.CheckConsistencyRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, table.Backup) - assert response.name == "name_value" - assert response.source_table == "source_table_value" - assert response.source_backup == "source_backup_value" - assert response.size_bytes == 1089 - assert response.state == table.Backup.State.CREATING + assert isinstance(response, bigtable_table_admin.CheckConsistencyResponse) + assert response.consistent is True -def test_get_backup_empty_call(): +def test_check_consistency_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -5416,16 +7369,162 @@ def test_get_backup_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - client.get_backup() + with mock.patch.object( + type(client.transport.check_consistency), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.check_consistency() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.GetBackupRequest() + assert args[0] == bigtable_table_admin.CheckConsistencyRequest() + + +def test_check_consistency_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.CheckConsistencyRequest( + name="name_value", + consistency_token="consistency_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_consistency), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.check_consistency(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CheckConsistencyRequest( + name="name_value", + consistency_token="consistency_token_value", + ) + + +def test_check_consistency_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check_consistency in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.check_consistency + ] = mock_rpc + request = {} + client.check_consistency(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.check_consistency(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_backup_async( - transport: str = "grpc_asyncio", request_type=bigtable_table_admin.GetBackupRequest +async def test_check_consistency_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.check_consistency), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_table_admin.CheckConsistencyResponse( + consistent=True, + ) + ) + response = await client.check_consistency() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CheckConsistencyRequest() + + +@pytest.mark.asyncio +async def test_check_consistency_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.check_consistency + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.check_consistency + ] = mock_object + + request = {} + await client.check_consistency(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.check_consistency(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_check_consistency_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.CheckConsistencyRequest, ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5437,53 +7536,50 @@ async def test_get_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.check_consistency), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - table.Backup( - name="name_value", - source_table="source_table_value", - source_backup="source_backup_value", - size_bytes=1089, - state=table.Backup.State.CREATING, + bigtable_table_admin.CheckConsistencyResponse( + consistent=True, ) ) - response = await client.get_backup(request) + response = await client.check_consistency(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.GetBackupRequest() + request = bigtable_table_admin.CheckConsistencyRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, table.Backup) - assert response.name == "name_value" - assert response.source_table == "source_table_value" - assert response.source_backup == "source_backup_value" - assert response.size_bytes == 1089 - assert response.state == table.Backup.State.CREATING + assert isinstance(response, bigtable_table_admin.CheckConsistencyResponse) + assert response.consistent is True @pytest.mark.asyncio -async def test_get_backup_async_from_dict(): - await test_get_backup_async(request_type=dict) +async def test_check_consistency_async_from_dict(): + await test_check_consistency_async(request_type=dict) -def test_get_backup_field_headers(): +def test_check_consistency_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.GetBackupRequest() + request = bigtable_table_admin.CheckConsistencyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value = table.Backup() - client.get_backup(request) + with mock.patch.object( + type(client.transport.check_consistency), "__call__" + ) as call: + call.return_value = bigtable_table_admin.CheckConsistencyResponse() + client.check_consistency(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5499,21 +7595,25 @@ def test_get_backup_field_headers(): @pytest.mark.asyncio -async def test_get_backup_field_headers_async(): +async def test_check_consistency_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.GetBackupRequest() + request = bigtable_table_admin.CheckConsistencyRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Backup()) - await client.get_backup(request) + with mock.patch.object( + type(client.transport.check_consistency), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_table_admin.CheckConsistencyResponse() + ) + await client.check_consistency(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5528,19 +7628,22 @@ async def test_get_backup_field_headers_async(): ) in kw["metadata"] -def test_get_backup_flattened(): +def test_check_consistency_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.check_consistency), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = table.Backup() + call.return_value = bigtable_table_admin.CheckConsistencyResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_backup( + client.check_consistency( name="name_value", + consistency_token="consistency_token_value", ) # Establish that the underlying call was made with the expected @@ -5550,9 +7653,12 @@ def test_get_backup_flattened(): arg = args[0].name mock_val = "name_value" assert arg == mock_val + arg = args[0].consistency_token + mock_val = "consistency_token_value" + assert arg == mock_val -def test_get_backup_flattened_error(): +def test_check_consistency_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5560,28 +7666,34 @@ def test_get_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup( - bigtable_table_admin.GetBackupRequest(), + client.check_consistency( + bigtable_table_admin.CheckConsistencyRequest(), name="name_value", + consistency_token="consistency_token_value", ) @pytest.mark.asyncio -async def test_get_backup_flattened_async(): +async def test_check_consistency_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.check_consistency), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = table.Backup() + call.return_value = bigtable_table_admin.CheckConsistencyResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Backup()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_table_admin.CheckConsistencyResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_backup( + response = await client.check_consistency( name="name_value", + consistency_token="consistency_token_value", ) # Establish that the underlying call was made with the expected @@ -5591,10 +7703,13 @@ async def test_get_backup_flattened_async(): arg = args[0].name mock_val = "name_value" assert arg == mock_val + arg = args[0].consistency_token + mock_val = "consistency_token_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_get_backup_flattened_error_async(): +async def test_check_consistency_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5602,20 +7717,21 @@ async def test_get_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_backup( - bigtable_table_admin.GetBackupRequest(), + await client.check_consistency( + bigtable_table_admin.CheckConsistencyRequest(), name="name_value", + consistency_token="consistency_token_value", ) @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.UpdateBackupRequest, + bigtable_table_admin.SnapshotTableRequest, dict, ], ) -def test_update_backup(request_type, transport: str = "grpc"): +def test_snapshot_table(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5626,32 +7742,22 @@ def test_update_backup(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = table.Backup( - name="name_value", - source_table="source_table_value", - source_backup="source_backup_value", - size_bytes=1089, - state=table.Backup.State.CREATING, - ) - response = client.update_backup(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.snapshot_table(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.UpdateBackupRequest() + request = bigtable_table_admin.SnapshotTableRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, table.Backup) - assert response.name == "name_value" - assert response.source_table == "source_table_value" - assert response.source_backup == "source_backup_value" - assert response.size_bytes == 1089 - assert response.state == table.Backup.State.CREATING + assert isinstance(response, future.Future) -def test_update_backup_empty_call(): +def test_snapshot_table_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -5660,17 +7766,164 @@ def test_update_backup_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: - client.update_backup() + with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.snapshot_table() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.UpdateBackupRequest() + assert args[0] == bigtable_table_admin.SnapshotTableRequest() + + +def test_snapshot_table_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.SnapshotTableRequest( + name="name_value", + cluster="cluster_value", + snapshot_id="snapshot_id_value", + description="description_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.snapshot_table(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.SnapshotTableRequest( + name="name_value", + cluster="cluster_value", + snapshot_id="snapshot_id_value", + description="description_value", + ) + + +def test_snapshot_table_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.snapshot_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.snapshot_table] = mock_rpc + request = {} + client.snapshot_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.snapshot_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_update_backup_async( +async def test_snapshot_table_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.snapshot_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.SnapshotTableRequest() + + +@pytest.mark.asyncio +async def test_snapshot_table_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.UpdateBackupRequest, +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.snapshot_table + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.snapshot_table + ] = mock_object + + request = {} + await client.snapshot_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.snapshot_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_snapshot_table_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.SnapshotTableRequest, ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5682,53 +7935,43 @@ async def test_update_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - table.Backup( - name="name_value", - source_table="source_table_value", - source_backup="source_backup_value", - size_bytes=1089, - state=table.Backup.State.CREATING, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_backup(request) + response = await client.snapshot_table(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.UpdateBackupRequest() + request = bigtable_table_admin.SnapshotTableRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, table.Backup) - assert response.name == "name_value" - assert response.source_table == "source_table_value" - assert response.source_backup == "source_backup_value" - assert response.size_bytes == 1089 - assert response.state == table.Backup.State.CREATING + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_backup_async_from_dict(): - await test_update_backup_async(request_type=dict) +async def test_snapshot_table_async_from_dict(): + await test_snapshot_table_async(request_type=dict) -def test_update_backup_field_headers(): +def test_snapshot_table_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.UpdateBackupRequest() + request = bigtable_table_admin.SnapshotTableRequest() - request.backup.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: - call.return_value = table.Backup() - client.update_backup(request) + with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.snapshot_table(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5739,26 +7982,28 @@ def test_update_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "backup.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_backup_field_headers_async(): +async def test_snapshot_table_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.UpdateBackupRequest() + request = bigtable_table_admin.SnapshotTableRequest() - request.backup.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Backup()) - await client.update_backup(request) + with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.snapshot_table(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5769,39 +8014,47 @@ async def test_update_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "backup.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_backup_flattened(): +def test_snapshot_table_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = table.Backup() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_backup( - backup=table.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.snapshot_table( + name="name_value", + cluster="cluster_value", + snapshot_id="snapshot_id_value", + description="description_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].backup - mock_val = table.Backup(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].cluster + mock_val = "cluster_value" + assert arg == mock_val + arg = args[0].snapshot_id + mock_val = "snapshot_id_value" + assert arg == mock_val + arg = args[0].description + mock_val = "description_value" assert arg == mock_val -def test_update_backup_flattened_error(): +def test_snapshot_table_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5809,46 +8062,58 @@ def test_update_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_backup( - bigtable_table_admin.UpdateBackupRequest(), - backup=table.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.snapshot_table( + bigtable_table_admin.SnapshotTableRequest(), + name="name_value", + cluster="cluster_value", + snapshot_id="snapshot_id_value", + description="description_value", ) @pytest.mark.asyncio -async def test_update_backup_flattened_async(): +async def test_snapshot_table_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.snapshot_table), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = table.Backup() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Backup()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_backup( - backup=table.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.snapshot_table( + name="name_value", + cluster="cluster_value", + snapshot_id="snapshot_id_value", + description="description_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].backup - mock_val = table.Backup(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].cluster + mock_val = "cluster_value" + assert arg == mock_val + arg = args[0].snapshot_id + mock_val = "snapshot_id_value" + assert arg == mock_val + arg = args[0].description + mock_val = "description_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_backup_flattened_error_async(): +async def test_snapshot_table_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5856,21 +8121,23 @@ async def test_update_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_backup( - bigtable_table_admin.UpdateBackupRequest(), - backup=table.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.snapshot_table( + bigtable_table_admin.SnapshotTableRequest(), + name="name_value", + cluster="cluster_value", + snapshot_id="snapshot_id_value", + description="description_value", ) @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.DeleteBackupRequest, + bigtable_table_admin.GetSnapshotRequest, dict, ], ) -def test_delete_backup(request_type, transport: str = "grpc"): +def test_get_snapshot(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5881,21 +8148,31 @@ def test_delete_backup(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_backup(request) + call.return_value = table.Snapshot( + name="name_value", + data_size_bytes=1594, + state=table.Snapshot.State.READY, + description="description_value", + ) + response = client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.DeleteBackupRequest() + request = bigtable_table_admin.GetSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, table.Snapshot) + assert response.name == "name_value" + assert response.data_size_bytes == 1594 + assert response.state == table.Snapshot.State.READY + assert response.description == "description_value" -def test_delete_backup_empty_call(): +def test_get_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -5904,17 +8181,155 @@ def test_delete_backup_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - client.delete_backup() + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.DeleteBackupRequest() + assert args[0] == bigtable_table_admin.GetSnapshotRequest() + + +def test_get_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.GetSnapshotRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_snapshot(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.GetSnapshotRequest( + name="name_value", + ) + + +def test_get_snapshot_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_snapshot] = mock_rpc + request = {} + client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_backup_async( +async def test_get_snapshot_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + table.Snapshot( + name="name_value", + data_size_bytes=1594, + state=table.Snapshot.State.READY, + description="description_value", + ) + ) + response = await client.get_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.GetSnapshotRequest() + + +@pytest.mark.asyncio +async def test_get_snapshot_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.DeleteBackupRequest, +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_snapshot + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_snapshot + ] = mock_object + + request = {} + await client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_snapshot_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.GetSnapshotRequest, ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5926,40 +8341,52 @@ async def test_delete_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + table.Snapshot( + name="name_value", + data_size_bytes=1594, + state=table.Snapshot.State.READY, + description="description_value", + ) + ) + response = await client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.DeleteBackupRequest() + request = bigtable_table_admin.GetSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, table.Snapshot) + assert response.name == "name_value" + assert response.data_size_bytes == 1594 + assert response.state == table.Snapshot.State.READY + assert response.description == "description_value" @pytest.mark.asyncio -async def test_delete_backup_async_from_dict(): - await test_delete_backup_async(request_type=dict) +async def test_get_snapshot_async_from_dict(): + await test_get_snapshot_async(request_type=dict) -def test_delete_backup_field_headers(): +def test_get_snapshot_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.DeleteBackupRequest() + request = bigtable_table_admin.GetSnapshotRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - call.return_value = None - client.delete_backup(request) + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + call.return_value = table.Snapshot() + client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5975,21 +8402,21 @@ def test_delete_backup_field_headers(): @pytest.mark.asyncio -async def test_delete_backup_field_headers_async(): +async def test_get_snapshot_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.DeleteBackupRequest() + request = bigtable_table_admin.GetSnapshotRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup(request) + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Snapshot()) + await client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6004,18 +8431,18 @@ async def test_delete_backup_field_headers_async(): ) in kw["metadata"] -def test_delete_backup_flattened(): +def test_get_snapshot_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = table.Snapshot() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_backup( + client.get_snapshot( name="name_value", ) @@ -6028,7 +8455,7 @@ def test_delete_backup_flattened(): assert arg == mock_val -def test_delete_backup_flattened_error(): +def test_get_snapshot_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6036,27 +8463,27 @@ def test_delete_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_backup( - bigtable_table_admin.DeleteBackupRequest(), + client.get_snapshot( + bigtable_table_admin.GetSnapshotRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_backup_flattened_async(): +async def test_get_snapshot_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = table.Snapshot() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Snapshot()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_backup( + response = await client.get_snapshot( name="name_value", ) @@ -6070,7 +8497,7 @@ async def test_delete_backup_flattened_async(): @pytest.mark.asyncio -async def test_delete_backup_flattened_error_async(): +async def test_get_snapshot_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6078,8 +8505,8 @@ async def test_delete_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_backup( - bigtable_table_admin.DeleteBackupRequest(), + await client.get_snapshot( + bigtable_table_admin.GetSnapshotRequest(), name="name_value", ) @@ -6087,11 +8514,11 @@ async def test_delete_backup_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.ListBackupsRequest, + bigtable_table_admin.ListSnapshotsRequest, dict, ], ) -def test_list_backups(request_type, transport: str = "grpc"): +def test_list_snapshots(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6102,24 +8529,25 @@ def test_list_backups(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = bigtable_table_admin.ListBackupsResponse( + call.return_value = bigtable_table_admin.ListSnapshotsResponse( next_page_token="next_page_token_value", ) - response = client.list_backups(request) + response = client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. 
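A note on the recurring `mock.patch.object(type(client.transport.get_snapshot), "__call__")` idiom in these hunks: the transport attribute is a gRPC multi-callable, and Python resolves special methods such as `__call__` on the type, not the instance, so patching the instance attribute would never intercept `stub(request)`. A toy demonstration of that lookup rule (`FakeMultiCallable` is illustrative, not the gRPC class):

# Why the tests patch type(obj).__call__ rather than obj.__call__:
# the call syntax looks the special method up on the type, so an
# instance-level override is ignored.
from unittest import mock


class FakeMultiCallable:
    def __call__(self, request):
        return "real response"


stub = FakeMultiCallable()

# An instance-level override is ignored by the call syntax ...
stub.__call__ = lambda request: "instance patch"
assert stub("req") == "real response"

# ... but patching __call__ on the type intercepts the call, which is
# what mock.patch.object(type(client.transport.<rpc>), "__call__") relies on.
with mock.patch.object(type(stub), "__call__") as call:
    call.return_value = "mocked response"
    assert stub("req") == "mocked response"
    call.assert_called_once()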
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.ListBackupsRequest() + request = bigtable_table_admin.ListSnapshotsRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsPager) + assert isinstance(response, pagers.ListSnapshotsPager) assert response.next_page_token == "next_page_token_value" -def test_list_backups_empty_call(): +def test_list_snapshots_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -6128,67 +8556,205 @@ def test_list_backups_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - client.list_backups() + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_snapshots() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.ListBackupsRequest() + assert args[0] == bigtable_table_admin.ListSnapshotsRequest() -@pytest.mark.asyncio -async def test_list_backups_async( - transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.ListBackupsRequest, -): - client = BigtableTableAdminAsyncClient( +def test_list_snapshots_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.ListSnapshotsRequest( + parent="parent_value", + page_token="page_token_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - bigtable_table_admin.ListBackupsResponse( - next_page_token="next_page_token_value", - ) + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - response = await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + client.list_snapshots(request=request) + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.ListBackupsRequest() + assert args[0] == bigtable_table_admin.ListSnapshotsRequest( + parent="parent_value", + page_token="page_token_value", + ) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupsAsyncPager) - assert response.next_page_token == "next_page_token_value" + +def test_list_snapshots_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_snapshots in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_snapshots] = mock_rpc + request = {} + client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_list_backups_async_from_dict(): - await test_list_backups_async(request_type=dict) +async def test_list_snapshots_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_table_admin.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_snapshots() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.ListSnapshotsRequest() -def test_list_backups_field_headers(): +@pytest.mark.asyncio +async def test_list_snapshots_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_snapshots + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_snapshots + ] = mock_object + + request = {} + await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
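The async `use_cached_wrapped_rpc` tests swap the cached wrapper for an `AsyncMock` subclass whose `__await__` simply bumps `await_count` and yields nothing, so the awaited call completes immediately without touching a real channel. The trick in isolation, assuming nothing beyond `unittest.mock` and `asyncio`:

# Minimal sketch of the AwaitableMock idea used by the async
# use_cached_wrapped_rpc tests: __await__ does the bookkeeping and
# returns an already-exhausted iterator, so awaiting it finishes at once.
import asyncio
from unittest import mock


class AwaitableMock(mock.AsyncMock):
    def __await__(self):
        self.await_count += 1
        return iter([])  # empty iterator == an awaitable that is already done


async def main():
    rpc = AwaitableMock()
    await rpc          # no event-loop round trip, just the counter
    await rpc
    assert rpc.await_count == 2


asyncio.run(main())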
+ assert mock_object.call_count == 1 + + await client.list_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_snapshots_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.ListSnapshotsRequest, +): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_table_admin.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = bigtable_table_admin.ListSnapshotsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSnapshotsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_snapshots_async_from_dict(): + await test_list_snapshots_async(request_type=dict) + + +def test_list_snapshots_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.ListBackupsRequest() + request = bigtable_table_admin.ListSnapshotsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - call.return_value = bigtable_table_admin.ListBackupsResponse() - client.list_backups(request) + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + call.return_value = bigtable_table_admin.ListSnapshotsResponse() + client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6204,23 +8770,23 @@ def test_list_backups_field_headers(): @pytest.mark.asyncio -async def test_list_backups_field_headers_async(): +async def test_list_snapshots_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.ListBackupsRequest() + request = bigtable_table_admin.ListSnapshotsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - bigtable_table_admin.ListBackupsResponse() + bigtable_table_admin.ListSnapshotsResponse() ) - await client.list_backups(request) + await client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6235,18 +8801,18 @@ async def test_list_backups_field_headers_async(): ) in kw["metadata"] -def test_list_backups_flattened(): +def test_list_snapshots_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = bigtable_table_admin.ListBackupsResponse() + call.return_value = bigtable_table_admin.ListSnapshotsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_backups( + client.list_snapshots( parent="parent_value", ) @@ -6259,7 +8825,7 @@ def test_list_backups_flattened(): assert arg == mock_val -def test_list_backups_flattened_error(): +def test_list_snapshots_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6267,29 +8833,29 @@ def test_list_backups_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backups( - bigtable_table_admin.ListBackupsRequest(), + client.list_snapshots( + bigtable_table_admin.ListSnapshotsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_backups_flattened_async(): +async def test_list_snapshots_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = bigtable_table_admin.ListBackupsResponse() + call.return_value = bigtable_table_admin.ListSnapshotsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - bigtable_table_admin.ListBackupsResponse() + bigtable_table_admin.ListSnapshotsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_backups( + response = await client.list_snapshots( parent="parent_value", ) @@ -6303,7 +8869,7 @@ async def test_list_backups_flattened_async(): @pytest.mark.asyncio -async def test_list_backups_flattened_error_async(): +async def test_list_snapshots_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6311,44 +8877,44 @@ async def test_list_backups_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
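The `*_field_headers` tests assert that URI-bound request fields are echoed into the `x-goog-request-params` metadata entry, and they build the expected value with `gapic_v1.routing_header.to_grpc_metadata`. A quick standalone look at what that helper returns (the field values here are illustrative; reserved-character encoding can vary by `google-api-core` version, so plain values are used):

# What the field-header assertions compare against: the routing-header
# helper turns (field, value) pairs into the x-goog-request-params entry.
from google.api_core import gapic_v1

params = (("parent", "parent_value"), ("name", "name_value"))
key, value = gapic_v1.routing_header.to_grpc_metadata(params)

assert key == "x-goog-request-params"
assert value == "parent=parent_value&name=name_value"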
with pytest.raises(ValueError): - await client.list_backups( - bigtable_table_admin.ListBackupsRequest(), + await client.list_snapshots( + bigtable_table_admin.ListSnapshotsRequest(), parent="parent_value", ) -def test_list_backups_pager(transport_name: str = "grpc"): +def test_list_snapshots_pager(transport_name: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - bigtable_table_admin.ListBackupsResponse( - backups=[ - table.Backup(), - table.Backup(), - table.Backup(), + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[ + table.Snapshot(), + table.Snapshot(), + table.Snapshot(), ], next_page_token="abc", ), - bigtable_table_admin.ListBackupsResponse( - backups=[], + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[], next_page_token="def", ), - bigtable_table_admin.ListBackupsResponse( - backups=[ - table.Backup(), + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[ + table.Snapshot(), ], next_page_token="ghi", ), - bigtable_table_admin.ListBackupsResponse( - backups=[ - table.Backup(), - table.Backup(), + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[ + table.Snapshot(), + table.Snapshot(), ], ), RuntimeError, @@ -6358,95 +8924,95 @@ def test_list_backups_pager(transport_name: str = "grpc"): metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_backups(request={}) + pager = client.list_snapshots(request={}) assert pager._metadata == metadata results = list(pager) assert len(results) == 6 - assert all(isinstance(i, table.Backup) for i in results) + assert all(isinstance(i, table.Snapshot) for i in results) -def test_list_backups_pages(transport_name: str = "grpc"): +def test_list_snapshots_pages(transport_name: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( - bigtable_table_admin.ListBackupsResponse( - backups=[ - table.Backup(), - table.Backup(), - table.Backup(), + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[ + table.Snapshot(), + table.Snapshot(), + table.Snapshot(), ], next_page_token="abc", ), - bigtable_table_admin.ListBackupsResponse( - backups=[], + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[], next_page_token="def", ), - bigtable_table_admin.ListBackupsResponse( - backups=[ - table.Backup(), + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[ + table.Snapshot(), ], next_page_token="ghi", ), - bigtable_table_admin.ListBackupsResponse( - backups=[ - table.Backup(), - table.Backup(), + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[ + table.Snapshot(), + table.Snapshot(), ], ), RuntimeError, ) - pages = list(client.list_backups(request={}).pages) + pages = list(client.list_snapshots(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_backups_async_pager(): +async def test_list_snapshots_async_pager(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_snapshots), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( - bigtable_table_admin.ListBackupsResponse( - backups=[ - table.Backup(), - table.Backup(), - table.Backup(), + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[ + table.Snapshot(), + table.Snapshot(), + table.Snapshot(), ], next_page_token="abc", ), - bigtable_table_admin.ListBackupsResponse( - backups=[], + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[], next_page_token="def", ), - bigtable_table_admin.ListBackupsResponse( - backups=[ - table.Backup(), + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[ + table.Snapshot(), ], next_page_token="ghi", ), - bigtable_table_admin.ListBackupsResponse( - backups=[ - table.Backup(), - table.Backup(), + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[ + table.Snapshot(), + table.Snapshot(), ], ), RuntimeError, ) - async_pager = await client.list_backups( + async_pager = await client.list_snapshots( request={}, ) assert async_pager.next_page_token == "abc" @@ -6455,43 +9021,43 @@ async def test_list_backups_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, table.Backup) for i in responses) + assert all(isinstance(i, table.Snapshot) for i in responses) @pytest.mark.asyncio -async def test_list_backups_async_pages(): +async def test_list_snapshots_async_pages(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_snapshots), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( - bigtable_table_admin.ListBackupsResponse( - backups=[ - table.Backup(), - table.Backup(), - table.Backup(), + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[ + table.Snapshot(), + table.Snapshot(), + table.Snapshot(), ], next_page_token="abc", ), - bigtable_table_admin.ListBackupsResponse( - backups=[], + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[], next_page_token="def", ), - bigtable_table_admin.ListBackupsResponse( - backups=[ - table.Backup(), + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[ + table.Snapshot(), ], next_page_token="ghi", ), - bigtable_table_admin.ListBackupsResponse( - backups=[ - table.Backup(), - table.Backup(), + bigtable_table_admin.ListSnapshotsResponse( + snapshots=[ + table.Snapshot(), + table.Snapshot(), ], ), RuntimeError, @@ -6500,7 +9066,7 @@ async def test_list_backups_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_backups(request={}) + await client.list_snapshots(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -6510,11 +9076,11 @@ async def test_list_backups_async_pages(): @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.RestoreTableRequest, + bigtable_table_admin.DeleteSnapshotRequest, dict, ], ) -def test_restore_table(request_type, transport: str = "grpc"): +def test_delete_snapshot(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6525,21 +9091,22 @@ def test_restore_table(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_table), "__call__") as call: + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.restore_table(request) + call.return_value = None + response = client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.RestoreTableRequest() + request = bigtable_table_admin.DeleteSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert response is None -def test_restore_table_empty_call(): +def test_delete_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -6548,161 +9115,148 @@ def test_restore_table_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_table), "__call__") as call: - client.restore_table() + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
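The pager tests above drive the stub with `side_effect`: a list of `ListSnapshotsResponse` pages followed by a `RuntimeError` sentinel, so a pager that asks for one page too many fails loudly instead of looping silently. The same shape, reduced to a plain mock and a toy page consumer (dicts stand in for the response messages; the real pager lives in the generated `pagers` module):

# Sketch of the paging pattern the tests rely on: each call to the stub
# yields the next page, and the trailing RuntimeError guards against the
# pager issuing an extra request after the last (token-less) page.
from unittest import mock

pages = [
    {"snapshots": ["s1", "s2", "s3"], "next_page_token": "abc"},
    {"snapshots": [], "next_page_token": "def"},
    {"snapshots": ["s4"], "next_page_token": "ghi"},
    {"snapshots": ["s5", "s6"], "next_page_token": ""},
]

stub = mock.Mock(side_effect=pages + [RuntimeError])


def iterate_items(rpc):
    # Keep fetching pages until one arrives without a continuation token.
    while True:
        page = rpc()
        yield from page["snapshots"]
        if not page["next_page_token"]:
            break


items = list(iterate_items(stub))
assert len(items) == 6          # mirrors `assert len(results) == 6` above
assert stub.call_count == 4     # the RuntimeError sentinel was never reached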
+ ) + client.delete_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.RestoreTableRequest() + assert args[0] == bigtable_table_admin.DeleteSnapshotRequest() -@pytest.mark.asyncio -async def test_restore_table_async( - transport: str = "grpc_asyncio", - request_type=bigtable_table_admin.RestoreTableRequest, -): - client = BigtableTableAdminAsyncClient( +def test_delete_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.DeleteSnapshotRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_table), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - response = await client.restore_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + client.delete_snapshot(request=request) + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.RestoreTableRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_restore_table_async_from_dict(): - await test_restore_table_async(request_type=dict) + assert args[0] == bigtable_table_admin.DeleteSnapshotRequest( + name="name_value", + ) -def test_restore_table_field_headers(): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) +def test_delete_snapshot_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = bigtable_table_admin.RestoreTableRequest() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request.parent = "parent_value" + # Ensure method has been cached + assert client._transport.delete_snapshot in client._transport._wrapped_methods - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.restore_table), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.restore_table(request) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_snapshot] = mock_rpc + request = {} + client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + assert mock_rpc.call_count == 1 - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + client.delete_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_restore_table_field_headers_async(): +async def test_delete_snapshot_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = bigtable_table_admin.RestoreTableRequest() - - request.parent = "parent_value" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_table), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.restore_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_snapshot() + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == bigtable_table_admin.DeleteSnapshotRequest() - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_snapshot_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) -@pytest.mark.parametrize( - "request_type", - [ - bigtable_table_admin.CopyBackupRequest, - dict, - ], -) -def test_copy_backup(request_type, transport: str = "grpc"): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Ensure method has been cached + assert ( + client._client._transport.delete_snapshot + in client._client._transport._wrapped_methods + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.copy_backup(request) + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CopyBackupRequest() + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_snapshot + ] = mock_object - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + request = {} + await client.delete_snapshot(request) + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 -def test_copy_backup_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + await client.delete_snapshot(request) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: - client.copy_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CopyBackupRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 @pytest.mark.asyncio -async def test_copy_backup_async( - transport: str = "grpc_asyncio", request_type=bigtable_table_admin.CopyBackupRequest +async def test_delete_snapshot_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.DeleteSnapshotRequest, ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6714,42 +9268,41 @@ async def test_copy_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.copy_backup(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable_table_admin.CopyBackupRequest() + request = bigtable_table_admin.DeleteSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert response is None @pytest.mark.asyncio -async def test_copy_backup_async_from_dict(): - await test_copy_backup_async(request_type=dict) +async def test_delete_snapshot_async_from_dict(): + await test_delete_snapshot_async(request_type=dict) -def test_copy_backup_field_headers(): +def test_delete_snapshot_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable_table_admin.CopyBackupRequest() + request = bigtable_table_admin.DeleteSnapshotRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.copy_backup(request) + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + call.return_value = None + client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6760,28 +9313,26 @@ def test_copy_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_copy_backup_field_headers_async(): +async def test_delete_snapshot_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = bigtable_table_admin.CopyBackupRequest() + request = bigtable_table_admin.DeleteSnapshotRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.copy_backup(request) + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6792,47 +9343,35 @@ async def test_copy_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_copy_backup_flattened(): +def test_delete_snapshot_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.copy_backup( - parent="parent_value", - backup_id="backup_id_value", - source_backup="source_backup_value", - expire_time=timestamp_pb2.Timestamp(seconds=751), - ) + client.delete_snapshot( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup_id - mock_val = "backup_id_value" - assert arg == mock_val - arg = args[0].source_backup - mock_val = "source_backup_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val - assert TimestampRule().to_proto(args[0].expire_time) == timestamp_pb2.Timestamp( - seconds=751 - ) -def test_copy_backup_flattened_error(): +def test_delete_snapshot_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6840,58 +9379,41 @@ def test_copy_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.copy_backup( - bigtable_table_admin.CopyBackupRequest(), - parent="parent_value", - backup_id="backup_id_value", - source_backup="source_backup_value", - expire_time=timestamp_pb2.Timestamp(seconds=751), + client.delete_snapshot( + bigtable_table_admin.DeleteSnapshotRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_copy_backup_flattened_async(): +async def test_delete_snapshot_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.copy_backup( - parent="parent_value", - backup_id="backup_id_value", - source_backup="source_backup_value", - expire_time=timestamp_pb2.Timestamp(seconds=751), + response = await client.delete_snapshot( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup_id - mock_val = "backup_id_value" - assert arg == mock_val - arg = args[0].source_backup - mock_val = "source_backup_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val - assert TimestampRule().to_proto(args[0].expire_time) == timestamp_pb2.Timestamp( - seconds=751 - ) @pytest.mark.asyncio -async def test_copy_backup_flattened_error_async(): +async def test_delete_snapshot_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6899,23 +9421,20 @@ async def test_copy_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.copy_backup( - bigtable_table_admin.CopyBackupRequest(), - parent="parent_value", - backup_id="backup_id_value", - source_backup="source_backup_value", - expire_time=timestamp_pb2.Timestamp(seconds=751), + await client.delete_snapshot( + bigtable_table_admin.DeleteSnapshotRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.GetIamPolicyRequest, + bigtable_table_admin.CreateBackupRequest, dict, ], ) -def test_get_iam_policy(request_type, transport: str = "grpc"): +def test_create_backup(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6926,26 +9445,22 @@ def test_get_iam_policy(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) - response = client.get_iam_policy(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + request = bigtable_table_admin.CreateBackupRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
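The `*_flattened_error` tests encode a GAPIC calling convention: pass either a fully formed request object or flattened keyword fields, never both. A compact sketch of that guard, with `delete_snapshot_like` as a hypothetical stand-in for a generated client method (the real clients perform an equivalent check before building the request):

# Sketch of the "request object OR flattened fields, never both" rule that
# the *_flattened_error tests pin down. delete_snapshot_like is a toy
# stand-in, not the real implementation.
def delete_snapshot_like(request=None, *, name=None):
    has_flattened = name is not None
    if request is not None and has_flattened:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
    # Build the request from flattened fields when no request was given.
    return request if request is not None else {"name": name}


# Either style works on its own ...
assert delete_snapshot_like({"name": "name_value"}) == {"name": "name_value"}
assert delete_snapshot_like(name="name_value") == {"name": "name_value"}

# ... but mixing them is rejected, which is what pytest.raises(ValueError) checks.
try:
    delete_snapshot_like({"name": "name_value"}, name="name_value")
except ValueError:
    pass
else:
    raise AssertionError("expected ValueError")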
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, future.Future) -def test_get_iam_policy_empty_call(): +def test_create_backup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -6954,16 +9469,160 @@ def test_get_iam_policy_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - client.get_iam_policy() + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == bigtable_table_admin.CreateBackupRequest() + + +def test_create_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + ) + + +def test_create_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc + request = {} + client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_get_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +async def test_create_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CreateBackupRequest() + + +@pytest.mark.asyncio +async def test_create_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup + ] = mock_object + + request = {} + await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.CreateBackupRequest, ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6975,47 +9634,43 @@ async def test_get_iam_policy_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_iam_policy(request) + response = await client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + request = bigtable_table_admin.CreateBackupRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) +async def test_create_backup_async_from_dict(): + await test_create_backup_async(request_type=dict) -def test_get_iam_policy_field_headers(): +def test_create_backup_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() + request = bigtable_table_admin.CreateBackupRequest() - request.resource = "resource_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7026,26 +9681,28 @@ def test_get_iam_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): +async def test_create_backup_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() + request = bigtable_table_admin.CreateBackupRequest() - request.resource = "resource_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -7056,52 +9713,43 @@ async def test_get_iam_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_iam_policy_from_dict_foreign(): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): +def test_create_backup_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_iam_policy( - resource="resource_value", + client.create_backup( + parent="parent_value", + backup_id="backup_id_value", + backup=table.Backup(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_id + mock_val = "backup_id_value" + assert arg == mock_val + arg = args[0].backup + mock_val = table.Backup(name="name_value") assert arg == mock_val -def test_get_iam_policy_flattened_error(): +def test_create_backup_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7109,41 +9757,53 @@ def test_get_iam_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource="resource_value", + client.create_backup( + bigtable_table_admin.CreateBackupRequest(), + parent="parent_value", + backup_id="backup_id_value", + backup=table.Backup(name="name_value"), ) @pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): +async def test_create_backup_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_iam_policy( - resource="resource_value", + response = await client.create_backup( + parent="parent_value", + backup_id="backup_id_value", + backup=table.Backup(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_id + mock_val = "backup_id_value" + assert arg == mock_val + arg = args[0].backup + mock_val = table.Backup(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): +async def test_create_backup_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7151,20 +9811,22 @@ async def test_get_iam_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource="resource_value", + await client.create_backup( + bigtable_table_admin.CreateBackupRequest(), + parent="parent_value", + backup_id="backup_id_value", + backup=table.Backup(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.SetIamPolicyRequest, + bigtable_table_admin.GetBackupRequest, dict, ], ) -def test_set_iam_policy(request_type, transport: str = "grpc"): +def test_get_backup(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7175,26 +9837,33 @@ def test_set_iam_policy(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", + call.return_value = table.Backup( + name="name_value", + source_table="source_table_value", + source_backup="source_backup_value", + size_bytes=1089, + state=table.Backup.State.CREATING, ) - response = client.set_iam_policy(request) + response = client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + request = bigtable_table_admin.GetBackupRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, table.Backup) + assert response.name == "name_value" + assert response.source_table == "source_table_value" + assert response.source_backup == "source_backup_value" + assert response.size_bytes == 1089 + assert response.state == table.Backup.State.CREATING -def test_set_iam_policy_empty_call(): +def test_get_backup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -7203,16 +9872,153 @@ def test_set_iam_policy_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - client.set_iam_policy() + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == bigtable_table_admin.GetBackupRequest() + + +def test_get_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.GetBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.GetBackupRequest( + name="name_value", + ) + + +def test_get_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
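+        # mock_rpc stands in for the cached wrapper, so its call_count tracks how
+        # many calls were dispatched through the cache.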
+ assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_set_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +async def test_get_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + table.Backup( + name="name_value", + source_table="source_table_value", + source_backup="source_backup_value", + size_bytes=1089, + state=table.Backup.State.CREATING, + ) + ) + response = await client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.GetBackupRequest() + + +@pytest.mark.asyncio +async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup + ] = mock_object + + request = {} + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_async( + transport: str = "grpc_asyncio", request_type=bigtable_table_admin.GetBackupRequest ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7224,47 +10030,54 @@ async def test_set_iam_policy_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", + table.Backup( + name="name_value", + source_table="source_table_value", + source_backup="source_backup_value", + size_bytes=1089, + state=table.Backup.State.CREATING, ) ) - response = await client.set_iam_policy(request) + response = await client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + request = bigtable_table_admin.GetBackupRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, table.Backup) + assert response.name == "name_value" + assert response.source_table == "source_table_value" + assert response.source_backup == "source_backup_value" + assert response.size_bytes == 1089 + assert response.state == table.Backup.State.CREATING @pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) -def test_set_iam_policy_field_headers(): +def test_get_backup_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() + request = bigtable_table_admin.GetBackupRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = table.Backup() + client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7275,26 +10088,26 @@ def test_set_iam_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): +async def test_get_backup_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() + request = bigtable_table_admin.GetBackupRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Backup()) + await client.get_backup(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -7305,53 +10118,35 @@ async def test_set_iam_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] -def test_set_iam_policy_from_dict_foreign(): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): +def test_get_backup_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() + call.return_value = table.Backup() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.set_iam_policy( - resource="resource_value", - ) + client.get_backup( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_set_iam_policy_flattened_error(): +def test_get_backup_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7359,41 +10154,41 @@ def test_set_iam_policy_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource="resource_value", + client.get_backup( + bigtable_table_admin.GetBackupRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_set_iam_policy_flattened_async(): +async def test_get_backup_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() + call.return_value = table.Backup() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Backup()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.set_iam_policy( - resource="resource_value", + response = await client.get_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_set_iam_policy_flattened_error_async(): +async def test_get_backup_flattened_error_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7401,20 +10196,20 @@ async def test_set_iam_policy_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource="resource_value", + await client.get_backup( + bigtable_table_admin.GetBackupRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, + bigtable_table_admin.UpdateBackupRequest, dict, ], ) -def test_test_iam_permissions(request_type, transport: str = "grpc"): +def test_update_backup(request_type, transport: str = "grpc"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7425,26 +10220,33 @@ def test_test_iam_permissions(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], + call.return_value = table.Backup( + name="name_value", + source_table="source_table_value", + source_backup="source_backup_value", + size_bytes=1089, + state=table.Backup.State.CREATING, ) - response = client.test_iam_permissions(request) + response = client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + request = bigtable_table_admin.UpdateBackupRequest() + assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ["permissions_value"] + assert isinstance(response, table.Backup) + assert response.name == "name_value" + assert response.source_table == "source_table_value" + assert response.source_backup == "source_backup_value" + assert response.size_bytes == 1089 + assert response.state == table.Backup.State.CREATING -def test_test_iam_permissions_empty_call(): +def test_update_backup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BigtableTableAdminClient( @@ -7453,19 +10255,152 @@ def test_test_iam_permissions_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - client.test_iam_permissions() + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_backup() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == bigtable_table_admin.UpdateBackupRequest() + + +def test_update_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.UpdateBackupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.UpdateBackupRequest() + + +def test_update_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_test_iam_permissions_async( +async def test_update_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
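+        # The async transport stub must yield an awaitable, so the response is
+        # wrapped in grpc_helpers_async.FakeUnaryUnaryCall.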
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + table.Backup( + name="name_value", + source_table="source_table_value", + source_backup="source_backup_value", + size_bytes=1089, + state=table.Backup.State.CREATING, + ) + ) + response = await client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.UpdateBackupRequest() + + +@pytest.mark.asyncio +async def test_update_backup_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", - request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_backup + ] = mock_object + + request = {} + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.UpdateBackupRequest, ): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7477,49 +10412,54 @@ async def test_test_iam_permissions_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], + table.Backup( + name="name_value", + source_table="source_table_value", + source_backup="source_backup_value", + size_bytes=1089, + state=table.Backup.State.CREATING, ) ) - response = await client.test_iam_permissions(request) + response = await client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + request = bigtable_table_admin.UpdateBackupRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ["permissions_value"] + assert isinstance(response, table.Backup) + assert response.name == "name_value" + assert response.source_table == "source_table_value" + assert response.source_backup == "source_backup_value" + assert response.size_bytes == 1089 + assert response.state == table.Backup.State.CREATING @pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) -def test_test_iam_permissions_field_headers(): +def test_update_backup_field_headers(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() + request = bigtable_table_admin.UpdateBackupRequest() - request.resource = "resource_value" + request.backup.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = table.Backup() + client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7530,30 +10470,26 @@ def test_test_iam_permissions_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "backup.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): +async def test_update_backup_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() + request = bigtable_table_admin.UpdateBackupRequest() - request.resource = "resource_value" + request.backup.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - await client.test_iam_permissions(request) + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Backup()) + await client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -7564,60 +10500,39 @@ async def test_test_iam_permissions_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "backup.name=name_value", ) in kw["metadata"] -def test_test_iam_permissions_from_dict_foreign(): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - - -def test_test_iam_permissions_flattened(): +def test_update_backup_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + call.return_value = table.Backup() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.test_iam_permissions( - resource="resource_value", - permissions=["permissions_value"], + client.update_backup( + backup=table.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].backup + mock_val = table.Backup(name="name_value") assert arg == mock_val - arg = args[0].permissions - mock_val = ["permissions_value"] + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_test_iam_permissions_flattened_error(): +def test_update_backup_flattened_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7625,116 +10540,4694 @@ def test_test_iam_permissions_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource="resource_value", - permissions=["permissions_value"], + client.update_backup( + bigtable_table_admin.UpdateBackupRequest(), + backup=table.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_test_iam_permissions_flattened_async(): +async def test_update_backup_flattened_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + call.return_value = table.Backup() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table.Backup()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.test_iam_permissions( - resource="resource_value", - permissions=["permissions_value"], + response = await client.update_backup( + backup=table.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = "resource_value" + arg = args[0].backup + mock_val = table.Backup(name="name_value") assert arg == mock_val - arg = args[0].permissions - mock_val = ["permissions_value"] + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_test_iam_permissions_flattened_error_async(): +async def test_update_backup_flattened_error_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_backup( + bigtable_table_admin.UpdateBackupRequest(), + backup=table.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + bigtable_table_admin.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup(request_type, transport: str = "grpc"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = bigtable_table_admin.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.DeleteBackupRequest() + + +def test_delete_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+    client = BigtableTableAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = bigtable_table_admin.DeleteBackupRequest(
+        name="name_value",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        call.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
+        )
+        client.delete_backup(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == bigtable_table_admin.DeleteBackupRequest(
+            name="name_value",
+        )
+
+
+def test_delete_backup_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = BigtableTableAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_backup in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
+        )
+        client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc
+        request = {}
+        client.delete_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.delete_backup(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_empty_call_async():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = BigtableTableAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_backup()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == bigtable_table_admin.DeleteBackupRequest()
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_async_use_cached_wrapped_rpc(
+    transport: str = "grpc_asyncio",
+):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = BigtableTableAdminAsyncClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert (
+            client._client._transport.delete_backup
+            in client._client._transport._wrapped_methods
+        )
+
+        # Replace cached wrapped function with mock
+        class AwaitableMock(mock.AsyncMock):
+            def __await__(self):
+                self.await_count += 1
+                return iter([])
+
+        mock_object = AwaitableMock()
+        client._client._transport._wrapped_methods[
+            client._client._transport.delete_backup
+        ] = mock_object
+
+        request = {}
+        await client.delete_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_object.call_count == 1
+
+        await client.delete_backup(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_object.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_async(
+    transport: str = "grpc_asyncio",
+    request_type=bigtable_table_admin.DeleteBackupRequest,
+):
+    client = BigtableTableAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = bigtable_table_admin.DeleteBackupRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_async_from_dict():
+    await test_delete_backup_async(request_type=dict)
+
+
+def test_delete_backup_field_headers():
+    client = BigtableTableAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = bigtable_table_admin.DeleteBackupRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        call.return_value = None
+        client.delete_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_field_headers_async(): client = BigtableTableAdminAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable_table_admin.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_flattened(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_flattened_error(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + bigtable_table_admin.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_error_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_backup( + bigtable_table_admin.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + bigtable_table_admin.ListBackupsRequest, + dict, + ], +) +def test_list_backups(request_type, transport: str = "grpc"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = bigtable_table_admin.ListBackupsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = bigtable_table_admin.ListBackupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_backups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.ListBackupsRequest() + + +def test_list_backups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.ListBackupsRequest( + parent="parent_value", + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_backups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.ListBackupsRequest( + parent="parent_value", + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", + ) + + +def test_list_backups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backups_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_table_admin.ListBackupsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.ListBackupsRequest() + + +@pytest.mark.asyncio +async def test_list_backups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backups + ] = mock_object + + request = {} + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backups_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.ListBackupsRequest, +): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_table_admin.ListBackupsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = bigtable_table_admin.ListBackupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) + + +def test_list_backups_field_headers(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable_table_admin.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = bigtable_table_admin.ListBackupsResponse() + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backups_field_headers_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable_table_admin.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_table_admin.ListBackupsResponse() + ) + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backups_flattened(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = bigtable_table_admin.ListBackupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backups_flattened_error(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + bigtable_table_admin.ListBackupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backups_flattened_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = bigtable_table_admin.ListBackupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable_table_admin.ListBackupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backups_flattened_error_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backups( + bigtable_table_admin.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_pager(transport_name: str = "grpc"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. 
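+        # Each response below is one page; iteration stops at the page with no
+        # next_page_token, and the trailing RuntimeError only fires if the pager
+        # makes an extra, unexpected call.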
+ call.side_effect = ( + bigtable_table_admin.ListBackupsResponse( + backups=[ + table.Backup(), + table.Backup(), + table.Backup(), + ], + next_page_token="abc", + ), + bigtable_table_admin.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + bigtable_table_admin.ListBackupsResponse( + backups=[ + table.Backup(), + ], + next_page_token="ghi", + ), + bigtable_table_admin.ListBackupsResponse( + backups=[ + table.Backup(), + table.Backup(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backups(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, table.Backup) for i in results) + + +def test_list_backups_pages(transport_name: str = "grpc"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + bigtable_table_admin.ListBackupsResponse( + backups=[ + table.Backup(), + table.Backup(), + table.Backup(), + ], + next_page_token="abc", + ), + bigtable_table_admin.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + bigtable_table_admin.ListBackupsResponse( + backups=[ + table.Backup(), + ], + next_page_token="ghi", + ), + bigtable_table_admin.ListBackupsResponse( + backups=[ + table.Backup(), + table.Backup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backups_async_pager(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + bigtable_table_admin.ListBackupsResponse( + backups=[ + table.Backup(), + table.Backup(), + table.Backup(), + ], + next_page_token="abc", + ), + bigtable_table_admin.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + bigtable_table_admin.ListBackupsResponse( + backups=[ + table.Backup(), + ], + next_page_token="ghi", + ), + bigtable_table_admin.ListBackupsResponse( + backups=[ + table.Backup(), + table.Backup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, table.Backup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + bigtable_table_admin.ListBackupsResponse( + backups=[ + table.Backup(), + table.Backup(), + table.Backup(), + ], + next_page_token="abc", + ), + bigtable_table_admin.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + bigtable_table_admin.ListBackupsResponse( + backups=[ + table.Backup(), + ], + next_page_token="ghi", + ), + bigtable_table_admin.ListBackupsResponse( + backups=[ + table.Backup(), + table.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + bigtable_table_admin.RestoreTableRequest, + dict, + ], +) +def test_restore_table(request_type, transport: str = "grpc"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.restore_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = bigtable_table_admin.RestoreTableRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restore_table_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.restore_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.RestoreTableRequest() + + +def test_restore_table_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.RestoreTableRequest( + parent="parent_value", + table_id="table_id_value", + backup="backup_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.restore_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.restore_table(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.RestoreTableRequest( + parent="parent_value", + table_id="table_id_value", + backup="backup_value", + ) + + +def test_restore_table_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restore_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.restore_table] = mock_rpc + request = {} + client.restore_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_restore_table_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_table), "__call__") as call: + # Designate an appropriate return value for the call. 
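+ # FakeUnaryUnaryCall wraps the message so that awaiting the mocked stub
+ # call resolves to it, mimicking a grpc.aio unary-unary call object.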
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_table() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.RestoreTableRequest() + + +@pytest.mark.asyncio +async def test_restore_table_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.restore_table + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.restore_table + ] = mock_object + + request = {} + await client.restore_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.restore_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_restore_table_async( + transport: str = "grpc_asyncio", + request_type=bigtable_table_admin.RestoreTableRequest, +): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = bigtable_table_admin.RestoreTableRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_table_async_from_dict(): + await test_restore_table_async(request_type=dict) + + +def test_restore_table_field_headers(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable_table_admin.RestoreTableRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
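+ # restore_table is a long-running operation, so the stub is faked with an
+ # Operation message; the client surfaces it as a future rather than a Table.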
+ with mock.patch.object(type(client.transport.restore_table), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restore_table_field_headers_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable_table_admin.RestoreTableRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.restore_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + bigtable_table_admin.CopyBackupRequest, + dict, + ], +) +def test_copy_backup(request_type, transport: str = "grpc"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = bigtable_table_admin.CopyBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_copy_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.copy_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CopyBackupRequest() + + +def test_copy_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable_table_admin.CopyBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.copy_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CopyBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + ) + + +def test_copy_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.copy_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.copy_backup] = mock_rpc + request = {} + client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.copy_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_copy_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.copy_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable_table_admin.CopyBackupRequest() + + +@pytest.mark.asyncio +async def test_copy_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.copy_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.copy_backup + ] = mock_object + + request = {} + await client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.copy_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_copy_backup_async( + transport: str = "grpc_asyncio", request_type=bigtable_table_admin.CopyBackupRequest +): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = bigtable_table_admin.CopyBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_copy_backup_async_from_dict(): + await test_copy_backup_async(request_type=dict) + + +def test_copy_backup_field_headers(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable_table_admin.CopyBackupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_copy_backup_field_headers_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable_table_admin.CopyBackupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_copy_backup_flattened(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.copy_backup( + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + expire_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_id + mock_val = "backup_id_value" + assert arg == mock_val + arg = args[0].source_backup + mock_val = "source_backup_value" + assert arg == mock_val + assert TimestampRule().to_proto(args[0].expire_time) == timestamp_pb2.Timestamp( + seconds=751 + ) + + +def test_copy_backup_flattened_error(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.copy_backup( + bigtable_table_admin.CopyBackupRequest(), + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + expire_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_copy_backup_flattened_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
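+ # Note: the first, synchronous-style ``return_value`` assigned below is
+ # immediately superseded by the FakeUnaryUnaryCall wrapper; only the
+ # awaitable fake is what the async client actually consumes.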
+ with mock.patch.object(type(client.transport.copy_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.copy_backup( + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + expire_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_id + mock_val = "backup_id_value" + assert arg == mock_val + arg = args[0].source_backup + mock_val = "source_backup_value" + assert arg == mock_val + assert TimestampRule().to_proto(args[0].expire_time) == timestamp_pb2.Timestamp( + seconds=751 + ) + + +@pytest.mark.asyncio +async def test_copy_backup_flattened_error_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.copy_backup( + bigtable_table_admin.CopyBackupRequest(), + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + expire_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy(request_type, transport: str = "grpc"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + +def test_get_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + +def test_get_iam_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_iam_policy_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_iam_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_iam_policy + ] = mock_object + + request = {} + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict_foreign(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_get_iam_policy_flattened(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +def test_get_iam_policy_flattened_error(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy(request_type, transport: str = "grpc"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +def test_set_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.set_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + +def test_set_iam_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_iam_policy_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.set_iam_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.set_iam_policy + ] = mock_object + + request = {} + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict_foreign(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +def test_set_iam_policy_flattened_error(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions(request_type, transport: str = "grpc"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.test_iam_permissions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + ) + + +def test_test_iam_permissions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_test_iam_permissions_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.test_iam_permissions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.test_iam_permissions + ] = mock_object + + request = {} + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict_foreign(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_test_iam_permissions_flattened(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.test_iam_permissions( + resource="resource_value", + permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + arg = args[0].permissions + mock_val = ["permissions_value"] + assert arg == mock_val + + +def test_test_iam_permissions_flattened_error(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.test_iam_permissions( + resource="resource_value", + permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + arg = args[0].permissions + mock_val = ["permissions_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_error_async(): + client = BigtableTableAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + bigtable_table_admin.CreateTableRequest, + dict, + ], +) +def test_create_table_rest(request_type): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/instances/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gba_table.Table( + name="name_value", + granularity=gba_table.Table.TimestampGranularity.MILLIS, + deletion_protection=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gba_table.Table.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_table(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gba_table.Table) + assert response.name == "name_value" + assert response.granularity == gba_table.Table.TimestampGranularity.MILLIS + assert response.deletion_protection is True + + +def test_create_table_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_table] = mock_rpc + + request = {} + client.create_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_table_rest_required_fields( + request_type=bigtable_table_admin.CreateTableRequest, +): + transport_class = transports.BigtableTableAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["table_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["tableId"] = "table_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "tableId" in jsonified_request + assert jsonified_request["tableId"] == "table_id_value" + + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gba_table.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gba_table.Table.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_table(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_table_rest_unset_required_fields(): + transport = transports.BigtableTableAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_table._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "tableId", + "table", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_table_rest_interceptors(null_interceptor): + transport = transports.BigtableTableAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BigtableTableAdminRestInterceptor(), + ) + client = BigtableTableAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BigtableTableAdminRestInterceptor, "post_create_table" + ) as post, mock.patch.object( + transports.BigtableTableAdminRestInterceptor, "pre_create_table" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = bigtable_table_admin.CreateTableRequest.pb( + bigtable_table_admin.CreateTableRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gba_table.Table.to_json(gba_table.Table()) + + request = bigtable_table_admin.CreateTableRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gba_table.Table() + + client.create_table( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_table_rest_bad_request( + transport: str = "rest", request_type=bigtable_table_admin.CreateTableRequest +): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/instances/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_table(request) + + +def test_create_table_rest_flattened(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gba_table.Table() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/instances/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + table_id="table_id_value", + table=gba_table.Table(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gba_table.Table.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/instances/*}/tables" % client.transport._host, + args[1], + ) + + +def test_create_table_rest_flattened_error(transport: str = "rest"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_table( + bigtable_table_admin.CreateTableRequest(), + parent="parent_value", + table_id="table_id_value", + table=gba_table.Table(name="name_value"), + ) + + +def test_create_table_rest_error(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + bigtable_table_admin.CreateTableFromSnapshotRequest, + dict, + ], +) +def test_create_table_from_snapshot_rest(request_type): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/instances/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_table_from_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_table_from_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_table_from_snapshot + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_table_from_snapshot + ] = mock_rpc + + request = {} + client.create_table_from_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_table_from_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_table_from_snapshot_rest_required_fields( + request_type=bigtable_table_admin.CreateTableFromSnapshotRequest, +): + transport_class = transports.BigtableTableAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["table_id"] = "" + request_init["source_snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_table_from_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["tableId"] = "table_id_value" + jsonified_request["sourceSnapshot"] = "source_snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_table_from_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "tableId" in jsonified_request + assert jsonified_request["tableId"] == "table_id_value" + assert "sourceSnapshot" in jsonified_request + assert jsonified_request["sourceSnapshot"] == "source_snapshot_value" + + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_table_from_snapshot(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_table_from_snapshot_rest_unset_required_fields(): + transport = transports.BigtableTableAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_table_from_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "tableId", + "sourceSnapshot", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_table_from_snapshot_rest_interceptors(null_interceptor): + transport = transports.BigtableTableAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BigtableTableAdminRestInterceptor(), + ) + client = BigtableTableAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BigtableTableAdminRestInterceptor, "post_create_table_from_snapshot" + ) as post, mock.patch.object( + transports.BigtableTableAdminRestInterceptor, "pre_create_table_from_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = bigtable_table_admin.CreateTableFromSnapshotRequest.pb( + bigtable_table_admin.CreateTableFromSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = bigtable_table_admin.CreateTableFromSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_table_from_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_table_from_snapshot_rest_bad_request( + transport: str = "rest", + request_type=bigtable_table_admin.CreateTableFromSnapshotRequest, +): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding 
+ request_init = {"parent": "projects/sample1/instances/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_table_from_snapshot(request) + + +def test_create_table_from_snapshot_rest_flattened(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/instances/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + table_id="table_id_value", + source_snapshot="source_snapshot_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_table_from_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/instances/*}/tables:createFromSnapshot" + % client.transport._host, + args[1], + ) + + +def test_create_table_from_snapshot_rest_flattened_error(transport: str = "rest"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_table_from_snapshot( + bigtable_table_admin.CreateTableFromSnapshotRequest(), + parent="parent_value", + table_id="table_id_value", + source_snapshot="source_snapshot_value", + ) + + +def test_create_table_from_snapshot_rest_error(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + bigtable_table_admin.ListTablesRequest, + dict, + ], +) +def test_list_tables_rest(request_type): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/instances/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = bigtable_table_admin.ListTablesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = bigtable_table_admin.ListTablesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_tables(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTablesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tables_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_tables in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_tables] = mock_rpc + + request = {} + client.list_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_tables(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_tables_rest_required_fields( + request_type=bigtable_table_admin.ListTablesRequest, +): + transport_class = transports.BigtableTableAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tables._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tables._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = bigtable_table_admin.ListTablesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = bigtable_table_admin.ListTablesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_tables(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_tables_rest_unset_required_fields(): + transport = transports.BigtableTableAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_tables._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_tables_rest_interceptors(null_interceptor): + transport = transports.BigtableTableAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BigtableTableAdminRestInterceptor(), + ) + client = BigtableTableAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BigtableTableAdminRestInterceptor, "post_list_tables" + ) as post, mock.patch.object( + transports.BigtableTableAdminRestInterceptor, "pre_list_tables" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = bigtable_table_admin.ListTablesRequest.pb( + bigtable_table_admin.ListTablesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = bigtable_table_admin.ListTablesResponse.to_json( + bigtable_table_admin.ListTablesResponse() + ) + + request = bigtable_table_admin.ListTablesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = bigtable_table_admin.ListTablesResponse() + + client.list_tables( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_tables_rest_bad_request( + transport: str = "rest", request_type=bigtable_table_admin.ListTablesRequest +): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/instances/sample2"} + request = request_type(**request_init) + + # Mock 
the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_tables(request) + + +def test_list_tables_rest_flattened(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = bigtable_table_admin.ListTablesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/instances/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = bigtable_table_admin.ListTablesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_tables(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/instances/*}/tables" % client.transport._host, + args[1], + ) + + +def test_list_tables_rest_flattened_error(transport: str = "rest"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tables( + bigtable_table_admin.ListTablesRequest(), + parent="parent_value", + ) + + +def test_list_tables_rest_pager(transport: str = "rest"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + bigtable_table_admin.ListTablesResponse( + tables=[ + table.Table(), + table.Table(), + table.Table(), + ], + next_page_token="abc", + ), + bigtable_table_admin.ListTablesResponse( + tables=[], + next_page_token="def", + ), + bigtable_table_admin.ListTablesResponse( + tables=[ + table.Table(), + ], + next_page_token="ghi", + ), + bigtable_table_admin.ListTablesResponse( + tables=[ + table.Table(), + table.Table(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + bigtable_table_admin.ListTablesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/instances/sample2"} + + pager = client.list_tables(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, table.Table) for i in results) + + pages = list(client.list_tables(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + bigtable_table_admin.GetTableRequest, + dict, + ], +) +def test_get_table_rest(request_type): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/instances/sample2/tables/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = table.Table( + name="name_value", + granularity=table.Table.TimestampGranularity.MILLIS, + deletion_protection=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = table.Table.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_table(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, table.Table) + assert response.name == "name_value" + assert response.granularity == table.Table.TimestampGranularity.MILLIS + assert response.deletion_protection is True + + +def test_get_table_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_table] = mock_rpc + + request = {} + client.get_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_table_rest_required_fields( + request_type=bigtable_table_admin.GetTableRequest, +): + transport_class = transports.BigtableTableAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_table._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = table.Table() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = table.Table.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_table(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_table_rest_unset_required_fields(): + transport = transports.BigtableTableAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_table._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_table_rest_interceptors(null_interceptor): + transport = transports.BigtableTableAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BigtableTableAdminRestInterceptor(), + ) + client = BigtableTableAdminClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BigtableTableAdminRestInterceptor, "post_get_table" + ) as post, mock.patch.object( + transports.BigtableTableAdminRestInterceptor, "pre_get_table" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = bigtable_table_admin.GetTableRequest.pb( + bigtable_table_admin.GetTableRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = table.Table.to_json(table.Table()) + + request = bigtable_table_admin.GetTableRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = table.Table() + + client.get_table( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_table_rest_bad_request( + transport: str = "rest", request_type=bigtable_table_admin.GetTableRequest +): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/instances/sample2/tables/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_table(request) + + +def test_get_table_rest_flattened(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = table.Table() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/instances/sample2/tables/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = table.Table.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/instances/*/tables/*}" % client.transport._host, + args[1], + ) + + +def test_get_table_rest_flattened_error(transport: str = "rest"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_table( + bigtable_table_admin.GetTableRequest(), + name="name_value", + ) + + +def test_get_table_rest_error(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + bigtable_table_admin.UpdateTableRequest, + dict, + ], +) +def test_update_table_rest(request_type): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "table": {"name": "projects/sample1/instances/sample2/tables/sample3"} + } + request_init["table"] = { + "name": "projects/sample1/instances/sample2/tables/sample3", + "cluster_states": {}, + "column_families": {}, + "granularity": 1, + "restore_info": { + "source_type": 1, + "backup_info": { + "backup": "backup_value", + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + "source_table": "source_table_value", + "source_backup": "source_backup_value", + }, + }, + "change_stream_config": {"retention_period": {"seconds": 751, "nanos": 543}}, + "deletion_protection": True, + "automated_backup_policy": {"retention_period": {}, "frequency": {}}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = bigtable_table_admin.UpdateTableRequest.meta.fields["table"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["table"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["table"][field])): + del request_init["table"][field][i][subfield] + else: + del request_init["table"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_table(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_table_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_table] = mock_rpc + + request = {} + client.update_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_table_rest_required_fields( + request_type=bigtable_table_admin.UpdateTableRequest, +): + transport_class = transports.BigtableTableAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_table._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_table(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_table_rest_unset_required_fields():
+    transport = transports.BigtableTableAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.update_table._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(("updateMask",))
+        & set(
+            (
+                "table",
+                "updateMask",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_table_rest_interceptors(null_interceptor):
+    transport = transports.BigtableTableAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.BigtableTableAdminRestInterceptor(),
+    )
+    client = BigtableTableAdminClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.BigtableTableAdminRestInterceptor, "post_update_table"
+    ) as post, mock.patch.object(
+        transports.BigtableTableAdminRestInterceptor, "pre_update_table"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = bigtable_table_admin.UpdateTableRequest.pb(
+            bigtable_table_admin.UpdateTableRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = bigtable_table_admin.UpdateTableRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.update_table(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_table_rest_bad_request(
+    transport: str = "rest", request_type=bigtable_table_admin.UpdateTableRequest
+):
+    client = BigtableTableAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "table": {"name": "projects/sample1/instances/sample2/tables/sample3"}
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_table(request) + + +def test_update_table_rest_flattened(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "table": {"name": "projects/sample1/instances/sample2/tables/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + table=gba_table.Table(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{table.name=projects/*/instances/*/tables/*}" + % client.transport._host, + args[1], + ) + + +def test_update_table_rest_flattened_error(transport: str = "rest"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource="resource_value", - permissions=["permissions_value"], + client.update_table( + bigtable_table_admin.UpdateTableRequest(), + table=gba_table.Table(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_table_rest_error(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.CreateTableRequest, + bigtable_table_admin.DeleteTableRequest, dict, ], ) -def test_create_table_rest(request_type): +def test_delete_table_rest(request_type): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = {"name": "projects/sample1/instances/sample2/tables/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gba_table.Table( - name="name_value", - granularity=gba_table.Table.TimestampGranularity.MILLIS, - deletion_protection=True, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gba_table.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_table(request) + response = client.delete_table(request) # Establish that the response is the type that we expect. - assert isinstance(response, gba_table.Table) - assert response.name == "name_value" - assert response.granularity == gba_table.Table.TimestampGranularity.MILLIS - assert response.deletion_protection is True + assert response is None -def test_create_table_rest_required_fields( - request_type=bigtable_table_admin.CreateTableRequest, +def test_delete_table_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_table] = mock_rpc + + request = {} + client.delete_table(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_table_rest_required_fields( + request_type=bigtable_table_admin.DeleteTableRequest, ): transport_class = transports.BigtableTableAdminRestTransport request_init = {} - request_init["parent"] = "" - request_init["table_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7748,24 +15241,21 @@ def test_create_table_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_table._get_unset_required_fields(jsonified_request) + ).delete_table._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["tableId"] = "table_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_table._get_unset_required_fields(jsonified_request) + ).delete_table._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "tableId" in jsonified_request - assert jsonified_request["tableId"] == "table_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7774,7 +15264,7 @@ def test_create_table_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gba_table.Table() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7786,49 +15276,36 @@ def test_create_table_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gba_table.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_table(request) + response = client.delete_table(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_table_rest_unset_required_fields(): +def test_delete_table_rest_unset_required_fields(): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_table._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "tableId", - "table", - ) - ) - ) + unset_fields = transport.delete_table._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_table_rest_interceptors(null_interceptor): +def test_delete_table_rest_interceptors(null_interceptor): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7841,14 +15318,11 @@ def test_create_table_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "post_create_table" - ) as post, mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "pre_create_table" + transports.BigtableTableAdminRestInterceptor, "pre_delete_table" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = bigtable_table_admin.CreateTableRequest.pb( - bigtable_table_admin.CreateTableRequest() + pb_message = bigtable_table_admin.DeleteTableRequest.pb( + bigtable_table_admin.DeleteTableRequest() ) transcode.return_value = { "method": "post", @@ -7860,17 +15334,15 @@ def test_create_table_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gba_table.Table.to_json(gba_table.Table()) - request = bigtable_table_admin.CreateTableRequest() + request = bigtable_table_admin.DeleteTableRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gba_table.Table() - client.create_table( + client.delete_table( request, metadata=[ ("key", "val"), @@ -7879,11 +15351,10 @@ def test_create_table_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_create_table_rest_bad_request( - transport: str = "rest", request_type=bigtable_table_admin.CreateTableRequest +def test_delete_table_rest_bad_request( + transport: str = "rest", request_type=bigtable_table_admin.DeleteTableRequest ): client = BigtableTableAdminClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -7891,7 +15362,7 @@ def test_create_table_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = {"name": "projects/sample1/instances/sample2/tables/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7903,10 +15374,10 @@ def test_create_table_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_table(request) + client.delete_table(request) -def test_create_table_rest_flattened(): +def test_delete_table_rest_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7915,41 +15386,37 @@ def test_create_table_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gba_table.Table() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/instances/sample2"} + sample_request = {"name": "projects/sample1/instances/sample2/tables/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - table_id="table_id_value", - table=gba_table.Table(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gba_table.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_table(**mock_args) + client.delete_table(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{parent=projects/*/instances/*}/tables" % client.transport._host, + "%s/v2/{name=projects/*/instances/*/tables/*}" % client.transport._host, args[1], ) -def test_create_table_rest_flattened_error(transport: str = "rest"): +def test_delete_table_rest_flattened_error(transport: str = "rest"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7958,15 +15425,13 @@ def test_create_table_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_table( - bigtable_table_admin.CreateTableRequest(), - parent="parent_value", - table_id="table_id_value", - table=gba_table.Table(name="name_value"), + client.delete_table( + bigtable_table_admin.DeleteTableRequest(), + name="name_value", ) -def test_create_table_rest_error(): +def test_delete_table_rest_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7975,18 +15440,18 @@ def test_create_table_rest_error(): @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.CreateTableFromSnapshotRequest, + bigtable_table_admin.UndeleteTableRequest, dict, ], ) -def test_create_table_from_snapshot_rest(request_type): +def test_undelete_table_rest(request_type): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = {"name": "projects/sample1/instances/sample2/tables/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -8001,21 +15466,59 @@ def test_create_table_from_snapshot_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_table_from_snapshot(request) + response = client.undelete_table(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_create_table_from_snapshot_rest_required_fields( - request_type=bigtable_table_admin.CreateTableFromSnapshotRequest, +def test_undelete_table_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.undelete_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.undelete_table] = mock_rpc + + request = {} + client.undelete_table(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.undelete_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_undelete_table_rest_required_fields( + request_type=bigtable_table_admin.UndeleteTableRequest, ): transport_class = transports.BigtableTableAdminRestTransport request_init = {} - request_init["parent"] = "" - request_init["table_id"] = "" - request_init["source_snapshot"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8029,27 +15532,21 @@ def test_create_table_from_snapshot_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_table_from_snapshot._get_unset_required_fields(jsonified_request) + ).undelete_table._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["tableId"] = "table_id_value" - jsonified_request["sourceSnapshot"] = "source_snapshot_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_table_from_snapshot._get_unset_required_fields(jsonified_request) + ).undelete_table._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "tableId" in jsonified_request - assert jsonified_request["tableId"] == "table_id_value" - assert "sourceSnapshot" in jsonified_request - assert jsonified_request["sourceSnapshot"] == "source_snapshot_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8083,33 +15580,24 @@ def test_create_table_from_snapshot_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_table_from_snapshot(request) + response = client.undelete_table(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_table_from_snapshot_rest_unset_required_fields(): +def test_undelete_table_rest_unset_required_fields(): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_table_from_snapshot._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "tableId", - "sourceSnapshot", - ) - ) - ) + unset_fields = transport.undelete_table._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_table_from_snapshot_rest_interceptors(null_interceptor): +def test_undelete_table_rest_interceptors(null_interceptor): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8124,14 +15612,14 @@ def 
test_create_table_from_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "post_create_table_from_snapshot" + transports.BigtableTableAdminRestInterceptor, "post_undelete_table" ) as post, mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "pre_create_table_from_snapshot" + transports.BigtableTableAdminRestInterceptor, "pre_undelete_table" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = bigtable_table_admin.CreateTableFromSnapshotRequest.pb( - bigtable_table_admin.CreateTableFromSnapshotRequest() + pb_message = bigtable_table_admin.UndeleteTableRequest.pb( + bigtable_table_admin.UndeleteTableRequest() ) transcode.return_value = { "method": "post", @@ -8147,7 +15635,7 @@ def test_create_table_from_snapshot_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = bigtable_table_admin.CreateTableFromSnapshotRequest() + request = bigtable_table_admin.UndeleteTableRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -8155,7 +15643,7 @@ def test_create_table_from_snapshot_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_table_from_snapshot( + client.undelete_table( request, metadata=[ ("key", "val"), @@ -8167,9 +15655,8 @@ def test_create_table_from_snapshot_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_table_from_snapshot_rest_bad_request( - transport: str = "rest", - request_type=bigtable_table_admin.CreateTableFromSnapshotRequest, +def test_undelete_table_rest_bad_request( + transport: str = "rest", request_type=bigtable_table_admin.UndeleteTableRequest ): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8177,7 +15664,7 @@ def test_create_table_from_snapshot_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = {"name": "projects/sample1/instances/sample2/tables/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -8189,10 +15676,10 @@ def test_create_table_from_snapshot_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_table_from_snapshot(request) + client.undelete_table(request) -def test_create_table_from_snapshot_rest_flattened(): +def test_undelete_table_rest_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8204,13 +15691,11 @@ def test_create_table_from_snapshot_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/instances/sample2"} + sample_request = {"name": "projects/sample1/instances/sample2/tables/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - table_id="table_id_value", - source_snapshot="source_snapshot_value", + name="name_value", ) mock_args.update(sample_request) @@ -8221,20 +15706,20 @@ def test_create_table_from_snapshot_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_table_from_snapshot(**mock_args) + client.undelete_table(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{parent=projects/*/instances/*}/tables:createFromSnapshot" + "%s/v2/{name=projects/*/instances/*/tables/*}:undelete" % client.transport._host, args[1], ) -def test_create_table_from_snapshot_rest_flattened_error(transport: str = "rest"): +def test_undelete_table_rest_flattened_error(transport: str = "rest"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8243,15 +15728,13 @@ def test_create_table_from_snapshot_rest_flattened_error(transport: str = "rest" # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_table_from_snapshot( - bigtable_table_admin.CreateTableFromSnapshotRequest(), - parent="parent_value", - table_id="table_id_value", - source_snapshot="source_snapshot_value", + client.undelete_table( + bigtable_table_admin.UndeleteTableRequest(), + name="name_value", ) -def test_create_table_from_snapshot_rest_error(): +def test_undelete_table_rest_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8260,86 +15743,202 @@ def test_create_table_from_snapshot_rest_error(): @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.ListTablesRequest, + bigtable_table_admin.CreateAuthorizedViewRequest, dict, ], ) -def test_list_tables_rest(request_type): +def test_create_authorized_view_rest(request_type): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = {"parent": "projects/sample1/instances/sample2/tables/sample3"} + request_init["authorized_view"] = { + "name": "name_value", + "subset_view": { + "row_prefixes": [b"row_prefixes_blob1", b"row_prefixes_blob2"], + "family_subsets": {}, + }, + "etag": "etag_value", + "deletion_protection": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = bigtable_table_admin.CreateAuthorizedViewRequest.meta.fields[ + "authorized_view" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["authorized_view"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["authorized_view"][field])): + del request_init["authorized_view"][field][i][subfield] + else: + del request_init["authorized_view"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = bigtable_table_admin.ListTablesResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = bigtable_table_admin.ListTablesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_tables(request) + response = client.create_authorized_view(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTablesPager) - assert response.next_page_token == "next_page_token_value" + assert response.operation.name == "operations/spam" -def test_list_tables_rest_required_fields( - request_type=bigtable_table_admin.ListTablesRequest, +def test_create_authorized_view_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_authorized_view + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_authorized_view + ] = mock_rpc + + request = {} + client.create_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_authorized_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_authorized_view_rest_required_fields( + request_type=bigtable_table_admin.CreateAuthorizedViewRequest, ): transport_class = transports.BigtableTableAdminRestTransport request_init = {} request_init["parent"] = "" + request_init["authorized_view_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped + assert "authorizedViewId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_tables._get_unset_required_fields(jsonified_request) + ).create_authorized_view._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "authorizedViewId" in jsonified_request + assert jsonified_request["authorizedViewId"] == request_init["authorized_view_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["authorizedViewId"] = "authorized_view_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_tables._get_unset_required_fields(jsonified_request) + ).create_authorized_view._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - "view", - ) - ) + assert not set(unset_fields) - set(("authorized_view_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "authorizedViewId" in jsonified_request + assert jsonified_request["authorizedViewId"] == "authorized_view_id_value" client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8348,7 +15947,7 @@ def test_list_tables_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = bigtable_table_admin.ListTablesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8360,48 +15959,52 @@ def test_list_tables_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = bigtable_table_admin.ListTablesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_tables(request) + response = client.create_authorized_view(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "authorizedViewId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_tables_rest_unset_required_fields(): +def test_create_authorized_view_rest_unset_required_fields(): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_tables._get_unset_required_fields({}) + unset_fields = transport.create_authorized_view._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("authorizedViewId",)) + & set( ( - "pageSize", - "pageToken", - "view", + "parent", + "authorizedViewId", + "authorizedView", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_tables_rest_interceptors(null_interceptor): +def test_create_authorized_view_rest_interceptors(null_interceptor): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8414,14 +16017,16 @@ def test_list_tables_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "post_list_tables" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BigtableTableAdminRestInterceptor, "post_create_authorized_view" ) as post, mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "pre_list_tables" + transports.BigtableTableAdminRestInterceptor, "pre_create_authorized_view" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = 
bigtable_table_admin.ListTablesRequest.pb( - bigtable_table_admin.ListTablesRequest() + pb_message = bigtable_table_admin.CreateAuthorizedViewRequest.pb( + bigtable_table_admin.CreateAuthorizedViewRequest() ) transcode.return_value = { "method": "post", @@ -8433,19 +16038,19 @@ def test_list_tables_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = bigtable_table_admin.ListTablesResponse.to_json( - bigtable_table_admin.ListTablesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = bigtable_table_admin.ListTablesRequest() + request = bigtable_table_admin.CreateAuthorizedViewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = bigtable_table_admin.ListTablesResponse() + post.return_value = operations_pb2.Operation() - client.list_tables( + client.create_authorized_view( request, metadata=[ ("key", "val"), @@ -8457,8 +16062,9 @@ def test_list_tables_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_tables_rest_bad_request( - transport: str = "rest", request_type=bigtable_table_admin.ListTablesRequest +def test_create_authorized_view_rest_bad_request( + transport: str = "rest", + request_type=bigtable_table_admin.CreateAuthorizedViewRequest, ): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8466,7 +16072,7 @@ def test_list_tables_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/instances/sample2"} + request_init = {"parent": "projects/sample1/instances/sample2/tables/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8478,10 +16084,10 @@ def test_list_tables_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_tables(request) + client.create_authorized_view(request) -def test_list_tables_rest_flattened(): +def test_create_authorized_view_rest_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8490,197 +16096,183 @@ def test_list_tables_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = bigtable_table_admin.ListTablesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/instances/sample2"} + sample_request = {"parent": "projects/sample1/instances/sample2/tables/sample3"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = bigtable_table_admin.ListTablesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_tables(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v2/{parent=projects/*/instances/*}/tables" % client.transport._host, - args[1], - ) - - -def test_list_tables_rest_flattened_error(transport: str = "rest"): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_tables( - bigtable_table_admin.ListTablesRequest(), - parent="parent_value", - ) - - -def test_list_tables_rest_pager(transport: str = "rest"): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - bigtable_table_admin.ListTablesResponse( - tables=[ - table.Table(), - table.Table(), - table.Table(), - ], - next_page_token="abc", - ), - bigtable_table_admin.ListTablesResponse( - tables=[], - next_page_token="def", - ), - bigtable_table_admin.ListTablesResponse( - tables=[ - table.Table(), - ], - next_page_token="ghi", - ), - bigtable_table_admin.ListTablesResponse( - tables=[ - table.Table(), - table.Table(), - ], - ), - ) - # Two responses for two calls - response = response + response + authorized_view=table.AuthorizedView(name="name_value"), + authorized_view_id="authorized_view_id_value", + ) + mock_args.update(sample_request) - # Wrap the values into proper Response objs - response = tuple( - bigtable_table_admin.ListTablesResponse.to_json(x) for x in response + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_authorized_view(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/instances/*/tables/*}/authorizedViews" + % client.transport._host, + args[1], ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/instances/sample2"} - pager = client.list_tables(request=sample_request) +def test_create_authorized_view_rest_flattened_error(transport: str = "rest"): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, table.Table) for i in results) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_authorized_view( + bigtable_table_admin.CreateAuthorizedViewRequest(), + parent="parent_value", + authorized_view=table.AuthorizedView(name="name_value"), + authorized_view_id="authorized_view_id_value", + ) - pages = list(client.list_tables(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + +def test_create_authorized_view_rest_error(): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.GetTableRequest, + bigtable_table_admin.ListAuthorizedViewsRequest, dict, ], ) -def test_get_table_rest(request_type): +def test_list_authorized_views_rest(request_type): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2/tables/sample3"} + request_init = {"parent": "projects/sample1/instances/sample2/tables/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = table.Table( - name="name_value", - granularity=table.Table.TimestampGranularity.MILLIS, - deletion_protection=True, + return_value = bigtable_table_admin.ListAuthorizedViewsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = table.Table.pb(return_value) + return_value = bigtable_table_admin.ListAuthorizedViewsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_table(request) + response = client.list_authorized_views(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, table.Table) - assert response.name == "name_value" - assert response.granularity == table.Table.TimestampGranularity.MILLIS - assert response.deletion_protection is True + assert isinstance(response, pagers.ListAuthorizedViewsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_table_rest_required_fields( - request_type=bigtable_table_admin.GetTableRequest, +def test_list_authorized_views_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_authorized_views + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_authorized_views + ] = mock_rpc + + request = {} + client.list_authorized_views(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_authorized_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_authorized_views_rest_required_fields( + request_type=bigtable_table_admin.ListAuthorizedViewsRequest, ): transport_class = transports.BigtableTableAdminRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_table._get_unset_required_fields(jsonified_request) + ).list_authorized_views._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_table._get_unset_required_fields(jsonified_request) + ).list_authorized_views._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("view",)) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "view", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8689,7 +16281,7 @@ def test_get_table_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = table.Table() + return_value = bigtable_table_admin.ListAuthorizedViewsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8710,30 +16302,41 @@ def test_get_table_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = table.Table.pb(return_value) + return_value = bigtable_table_admin.ListAuthorizedViewsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_table(request) + response = client.list_authorized_views(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_table_rest_unset_required_fields(): +def test_list_authorized_views_rest_unset_required_fields(): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_table._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view",)) & set(("name",))) + unset_fields = transport.list_authorized_views._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_table_rest_interceptors(null_interceptor): +def test_list_authorized_views_rest_interceptors(null_interceptor): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8746,14 +16349,14 @@ def test_get_table_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "post_get_table" + transports.BigtableTableAdminRestInterceptor, "post_list_authorized_views" ) as post, mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "pre_get_table" + transports.BigtableTableAdminRestInterceptor, "pre_list_authorized_views" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = bigtable_table_admin.GetTableRequest.pb( - bigtable_table_admin.GetTableRequest() + pb_message = bigtable_table_admin.ListAuthorizedViewsRequest.pb( + bigtable_table_admin.ListAuthorizedViewsRequest() ) transcode.return_value = { "method": "post", @@ -8765,17 +16368,21 @@ def test_get_table_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = table.Table.to_json(table.Table()) + 
req.return_value._content = ( + bigtable_table_admin.ListAuthorizedViewsResponse.to_json( + bigtable_table_admin.ListAuthorizedViewsResponse() + ) + ) - request = bigtable_table_admin.GetTableRequest() + request = bigtable_table_admin.ListAuthorizedViewsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = table.Table() + post.return_value = bigtable_table_admin.ListAuthorizedViewsResponse() - client.get_table( + client.list_authorized_views( request, metadata=[ ("key", "val"), @@ -8787,8 +16394,9 @@ def test_get_table_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_table_rest_bad_request( - transport: str = "rest", request_type=bigtable_table_admin.GetTableRequest +def test_list_authorized_views_rest_bad_request( + transport: str = "rest", + request_type=bigtable_table_admin.ListAuthorizedViewsRequest, ): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8796,7 +16404,7 @@ def test_get_table_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2/tables/sample3"} + request_init = {"parent": "projects/sample1/instances/sample2/tables/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8808,10 +16416,10 @@ def test_get_table_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_table(request) + client.list_authorized_views(request) -def test_get_table_rest_flattened(): +def test_list_authorized_views_rest_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8820,14 +16428,14 @@ def test_get_table_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = table.Table() + return_value = bigtable_table_admin.ListAuthorizedViewsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/instances/sample2/tables/sample3"} + sample_request = {"parent": "projects/sample1/instances/sample2/tables/sample3"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -8835,24 +16443,25 @@ def test_get_table_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = table.Table.pb(return_value) + return_value = bigtable_table_admin.ListAuthorizedViewsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_table(**mock_args) + client.list_authorized_views(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{name=projects/*/instances/*/tables/*}" % client.transport._host, + "%s/v2/{parent=projects/*/instances/*/tables/*}/authorizedViews" + % client.transport._host, args[1], ) -def test_get_table_rest_flattened_error(transport: str = "rest"): +def test_list_authorized_views_rest_flattened_error(transport: str = "rest"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8861,172 +16470,196 @@ def test_get_table_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_table( - bigtable_table_admin.GetTableRequest(), - name="name_value", + client.list_authorized_views( + bigtable_table_admin.ListAuthorizedViewsRequest(), + parent="parent_value", ) -def test_get_table_rest_error(): - client = BigtableTableAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - bigtable_table_admin.UpdateTableRequest, - dict, - ], -) -def test_update_table_rest(request_type): +def test_list_authorized_views_rest_pager(transport: str = "rest"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = { - "table": {"name": "projects/sample1/instances/sample2/tables/sample3"} - } - request_init["table"] = { - "name": "projects/sample1/instances/sample2/tables/sample3", - "cluster_states": {}, - "column_families": {}, - "granularity": 1, - "restore_info": { - "source_type": 1, - "backup_info": { - "backup": "backup_value", - "start_time": {"seconds": 751, "nanos": 543}, - "end_time": {}, - "source_table": "source_table_value", - "source_backup": "source_backup_value", - }, - }, - "change_stream_config": {"retention_period": {"seconds": 751, "nanos": 543}}, - "deletion_protection": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = bigtable_table_admin.UpdateTableRequest.meta.fields["table"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + table.AuthorizedView(), + table.AuthorizedView(), + ], + next_page_token="abc", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[], + next_page_token="def", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + ], + next_page_token="ghi", + ), + bigtable_table_admin.ListAuthorizedViewsResponse( + authorized_views=[ + table.AuthorizedView(), + table.AuthorizedView(), + ], + ), + ) + # Two responses for two calls + response = response + response - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Wrap the values into proper Response objs + response = tuple( + bigtable_table_admin.ListAuthorizedViewsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + sample_request = {"parent": "projects/sample1/instances/sample2/tables/sample3"} - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + pager = client.list_authorized_views(request=sample_request) - subfields_not_in_runtime = [] + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, table.AuthorizedView) for i in results) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["table"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + pages = list(client.list_authorized_views(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["table"][field])): - del request_init["table"][field][i][subfield] - else: - del 
request_init["table"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + bigtable_table_admin.GetAuthorizedViewRequest, + dict, + ], +) +def test_get_authorized_view_rest(request_type): + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = table.AuthorizedView( + name="name_value", + etag="etag_value", + deletion_protection=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = table.AuthorizedView.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_table(request) + response = client.get_authorized_view(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, table.AuthorizedView) + assert response.name == "name_value" + assert response.etag == "etag_value" + assert response.deletion_protection is True -def test_update_table_rest_required_fields( - request_type=bigtable_table_admin.UpdateTableRequest, +def test_get_authorized_view_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_authorized_view in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_authorized_view + ] = mock_rpc + + request = {} + client.get_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_authorized_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_authorized_view_rest_required_fields( + request_type=bigtable_table_admin.GetAuthorizedViewRequest, ): transport_class = transports.BigtableTableAdminRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_table._get_unset_required_fields(jsonified_request) + ).get_authorized_view._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_table._get_unset_required_fields(jsonified_request) + ).get_authorized_view._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set(("view",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9035,7 +16668,7 @@ def test_update_table_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = table.AuthorizedView() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9047,45 +16680,39 @@ def test_update_table_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = table.AuthorizedView.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_table(request) + response = client.get_authorized_view(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_table_rest_unset_required_fields(): +def test_get_authorized_view_rest_unset_required_fields(): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_table._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "table", - "updateMask", - ) - ) - ) + unset_fields = transport.get_authorized_view._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_table_rest_interceptors(null_interceptor): +def test_get_authorized_view_rest_interceptors(null_interceptor): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9098,16 +16725,14 @@ def test_update_table_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "post_update_table" + transports.BigtableTableAdminRestInterceptor, "post_get_authorized_view" ) as post, mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "pre_update_table" + transports.BigtableTableAdminRestInterceptor, "pre_get_authorized_view" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = bigtable_table_admin.UpdateTableRequest.pb( - bigtable_table_admin.UpdateTableRequest() + pb_message = bigtable_table_admin.GetAuthorizedViewRequest.pb( + bigtable_table_admin.GetAuthorizedViewRequest() ) transcode.return_value = { "method": "post", @@ -9119,19 +16744,17 @@ def test_update_table_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = table.AuthorizedView.to_json(table.AuthorizedView()) - request = bigtable_table_admin.UpdateTableRequest() + request = bigtable_table_admin.GetAuthorizedViewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = table.AuthorizedView() - client.update_table( + client.get_authorized_view( request, metadata=[ ("key", "val"), @@ -9143,8 +16766,8 @@ def test_update_table_rest_interceptors(null_interceptor): 
post.assert_called_once() -def test_update_table_rest_bad_request( - transport: str = "rest", request_type=bigtable_table_admin.UpdateTableRequest +def test_get_authorized_view_rest_bad_request( + transport: str = "rest", request_type=bigtable_table_admin.GetAuthorizedViewRequest ): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9153,7 +16776,7 @@ def test_update_table_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "table": {"name": "projects/sample1/instances/sample2/tables/sample3"} + "name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" } request = request_type(**request_init) @@ -9166,10 +16789,10 @@ def test_update_table_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_table(request) + client.get_authorized_view(request) -def test_update_table_rest_flattened(): +def test_get_authorized_view_rest_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9178,41 +16801,42 @@ def test_update_table_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = table.AuthorizedView() # get arguments that satisfy an http rule for this method sample_request = { - "table": {"name": "projects/sample1/instances/sample2/tables/sample3"} + "name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" } # get truthy value for each flattened field mock_args = dict( - table=gba_table.Table(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = table.AuthorizedView.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_table(**mock_args) + client.get_authorized_view(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{table.name=projects/*/instances/*/tables/*}" + "%s/v2/{name=projects/*/instances/*/tables/*/authorizedViews/*}" % client.transport._host, args[1], ) -def test_update_table_rest_flattened_error(transport: str = "rest"): +def test_get_authorized_view_rest_flattened_error(transport: str = "rest"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9221,14 +16845,13 @@ def test_update_table_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_table( - bigtable_table_admin.UpdateTableRequest(), - table=gba_table.Table(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_authorized_view( + bigtable_table_admin.GetAuthorizedViewRequest(), + name="name_value", ) -def test_update_table_rest_error(): +def test_get_authorized_view_rest_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9237,73 +16860,199 @@ def test_update_table_rest_error(): @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.DeleteTableRequest, + bigtable_table_admin.UpdateAuthorizedViewRequest, dict, ], ) -def test_delete_table_rest(request_type): +def test_update_authorized_view_rest(request_type): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2/tables/sample3"} + request_init = { + "authorized_view": { + "name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } + } + request_init["authorized_view"] = { + "name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4", + "subset_view": { + "row_prefixes": [b"row_prefixes_blob1", b"row_prefixes_blob2"], + "family_subsets": {}, + }, + "etag": "etag_value", + "deletion_protection": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = bigtable_table_admin.UpdateAuthorizedViewRequest.meta.fields[ + "authorized_view" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["authorized_view"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["authorized_view"][field])): + del request_init["authorized_view"][field][i][subfield] + else: + del request_init["authorized_view"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_table(request) + response = client.update_authorized_view(request) # Establish that the response is the type that we expect. 
- assert response is None + assert response.operation.name == "operations/spam" -def test_delete_table_rest_required_fields( - request_type=bigtable_table_admin.DeleteTableRequest, +def test_update_authorized_view_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_authorized_view + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_authorized_view + ] = mock_rpc + + request = {} + client.update_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_authorized_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_authorized_view_rest_required_fields( + request_type=bigtable_table_admin.UpdateAuthorizedViewRequest, ): transport_class = transports.BigtableTableAdminRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_table._get_unset_required_fields(jsonified_request) + ).update_authorized_view._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_table._get_unset_required_fields(jsonified_request) + ).update_authorized_view._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "ignore_warnings", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9312,7 +17061,7 @@ def test_delete_table_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9324,36 +17073,45 @@ def test_delete_table_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_table(request) + response = client.update_authorized_view(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_table_rest_unset_required_fields(): +def test_update_authorized_view_rest_unset_required_fields(): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_table._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - + unset_fields = transport.update_authorized_view._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "ignoreWarnings", + "updateMask", + ) + ) + & set(("authorizedView",)) + ) + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_table_rest_interceptors(null_interceptor): +def test_update_authorized_view_rest_interceptors(null_interceptor): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9366,11 +17124,16 @@ def test_delete_table_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "pre_delete_table" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BigtableTableAdminRestInterceptor, "post_update_authorized_view" + ) as post, mock.patch.object( + transports.BigtableTableAdminRestInterceptor, "pre_update_authorized_view" ) as pre: pre.assert_not_called() - pb_message = bigtable_table_admin.DeleteTableRequest.pb( - bigtable_table_admin.DeleteTableRequest() + post.assert_not_called() + pb_message = bigtable_table_admin.UpdateAuthorizedViewRequest.pb( + bigtable_table_admin.UpdateAuthorizedViewRequest() ) transcode.return_value = { "method": "post", @@ -9382,15 +17145,19 @@ def test_delete_table_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = bigtable_table_admin.DeleteTableRequest() + request = bigtable_table_admin.UpdateAuthorizedViewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.delete_table( + client.update_authorized_view( request, metadata=[ ("key", "val"), @@ -9399,10 +17166,12 @@ def test_delete_table_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_table_rest_bad_request( - transport: str = "rest", request_type=bigtable_table_admin.DeleteTableRequest +def test_update_authorized_view_rest_bad_request( 
+ transport: str = "rest", + request_type=bigtable_table_admin.UpdateAuthorizedViewRequest, ): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9410,7 +17179,11 @@ def test_delete_table_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2/tables/sample3"} + request_init = { + "authorized_view": { + "name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9422,10 +17195,10 @@ def test_delete_table_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_table(request) + client.update_authorized_view(request) -def test_delete_table_rest_flattened(): +def test_update_authorized_view_rest_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9434,37 +17207,43 @@ def test_delete_table_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/instances/sample2/tables/sample3"} + sample_request = { + "authorized_view": { + "name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + authorized_view=table.AuthorizedView(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_table(**mock_args) + client.update_authorized_view(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{name=projects/*/instances/*/tables/*}" % client.transport._host, + "%s/v2/{authorized_view.name=projects/*/instances/*/tables/*/authorizedViews/*}" + % client.transport._host, args[1], ) -def test_delete_table_rest_flattened_error(transport: str = "rest"): +def test_update_authorized_view_rest_flattened_error(transport: str = "rest"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9473,13 +17252,14 @@ def test_delete_table_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_table( - bigtable_table_admin.DeleteTableRequest(), - name="name_value", + client.update_authorized_view( + bigtable_table_admin.UpdateAuthorizedViewRequest(), + authorized_view=table.AuthorizedView(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_delete_table_rest_error(): +def test_update_authorized_view_rest_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9488,40 +17268,83 @@ def test_delete_table_rest_error(): @pytest.mark.parametrize( "request_type", [ - bigtable_table_admin.UndeleteTableRequest, + bigtable_table_admin.DeleteAuthorizedViewRequest, dict, ], ) -def test_undelete_table_rest(request_type): +def test_delete_authorized_view_rest(request_type): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2/tables/sample3"} + request_init = { + "name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.undelete_table(request) + response = client.delete_authorized_view(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert response is None -def test_undelete_table_rest_required_fields( - request_type=bigtable_table_admin.UndeleteTableRequest, +def test_delete_authorized_view_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_authorized_view + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_authorized_view + ] = mock_rpc + + request = {} + client.delete_authorized_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_authorized_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_authorized_view_rest_required_fields( + request_type=bigtable_table_admin.DeleteAuthorizedViewRequest, ): transport_class = transports.BigtableTableAdminRestTransport @@ -9530,17 +17353,14 @@ def test_undelete_table_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).undelete_table._get_unset_required_fields(jsonified_request) + ).delete_authorized_view._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -9549,7 +17369,9 @@ def test_undelete_table_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).undelete_table._get_unset_required_fields(jsonified_request) + ).delete_authorized_view._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -9563,7 +17385,7 @@ def test_undelete_table_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9575,37 +17397,36 @@ def test_undelete_table_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.undelete_table(request) + response = client.delete_authorized_view(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_undelete_table_rest_unset_required_fields(): +def test_delete_authorized_view_rest_unset_required_fields(): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.undelete_table._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_authorized_view._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_undelete_table_rest_interceptors(null_interceptor): +def test_delete_authorized_view_rest_interceptors(null_interceptor): transport = transports.BigtableTableAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9618,16 +17439,11 @@ def test_undelete_table_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "post_undelete_table" - ) as post, mock.patch.object( - transports.BigtableTableAdminRestInterceptor, "pre_undelete_table" + transports.BigtableTableAdminRestInterceptor, "pre_delete_authorized_view" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = bigtable_table_admin.UndeleteTableRequest.pb( - bigtable_table_admin.UndeleteTableRequest() + pb_message = bigtable_table_admin.DeleteAuthorizedViewRequest.pb( + bigtable_table_admin.DeleteAuthorizedViewRequest() ) transcode.return_value = { "method": "post", @@ -9639,19 +17455,15 @@ def test_undelete_table_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - request = bigtable_table_admin.UndeleteTableRequest() + request = bigtable_table_admin.DeleteAuthorizedViewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - client.undelete_table( + client.delete_authorized_view( request, metadata=[ ("key", "val"), @@ -9660,11 +17472,11 @@ def test_undelete_table_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_undelete_table_rest_bad_request( - transport: str = "rest", request_type=bigtable_table_admin.UndeleteTableRequest +def test_delete_authorized_view_rest_bad_request( + transport: str = "rest", + 
request_type=bigtable_table_admin.DeleteAuthorizedViewRequest, ): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9672,7 +17484,9 @@ def test_undelete_table_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/instances/sample2/tables/sample3"} + request_init = { + "name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9684,10 +17498,10 @@ def test_undelete_table_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.undelete_table(request) + client.delete_authorized_view(request) -def test_undelete_table_rest_flattened(): +def test_delete_authorized_view_rest_flattened(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9696,10 +17510,12 @@ def test_undelete_table_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/instances/sample2/tables/sample3"} + sample_request = { + "name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } # get truthy value for each flattened field mock_args = dict( @@ -9710,24 +17526,24 @@ def test_undelete_table_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.undelete_table(**mock_args) + client.delete_authorized_view(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{name=projects/*/instances/*/tables/*}:undelete" + "%s/v2/{name=projects/*/instances/*/tables/*/authorizedViews/*}" % client.transport._host, args[1], ) -def test_undelete_table_rest_flattened_error(transport: str = "rest"): +def test_delete_authorized_view_rest_flattened_error(transport: str = "rest"): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9736,13 +17552,13 @@ def test_undelete_table_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.undelete_table( - bigtable_table_admin.UndeleteTableRequest(), + client.delete_authorized_view( + bigtable_table_admin.DeleteAuthorizedViewRequest(), name="name_value", ) -def test_undelete_table_rest_error(): +def test_delete_authorized_view_rest_error(): client = BigtableTableAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9792,6 +17608,47 @@ def test_modify_column_families_rest(request_type): assert response.deletion_protection is True +def test_modify_column_families_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.modify_column_families + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.modify_column_families + ] = mock_rpc + + request = {} + client.modify_column_families(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.modify_column_families(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_modify_column_families_rest_required_fields( request_type=bigtable_table_admin.ModifyColumnFamiliesRequest, ): @@ -10075,6 +17932,42 @@ def test_drop_row_range_rest(request_type): assert response is None +def test_drop_row_range_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.drop_row_range in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.drop_row_range] = mock_rpc + + request = {} + client.drop_row_range(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.drop_row_range(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_drop_row_range_rest_required_fields( request_type=bigtable_table_admin.DropRowRangeRequest, ): @@ -10280,6 +18173,47 @@ def test_generate_consistency_token_rest(request_type): assert response.consistency_token == "consistency_token_value" +def test_generate_consistency_token_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.generate_consistency_token + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_consistency_token + ] = mock_rpc + + request = {} + client.generate_consistency_token(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_consistency_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_generate_consistency_token_rest_required_fields( request_type=bigtable_table_admin.GenerateConsistencyTokenRequest, ): @@ -10558,6 +18492,44 @@ def test_check_consistency_rest(request_type): assert response.consistent is True +def test_check_consistency_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.check_consistency in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.check_consistency + ] = mock_rpc + + request = {} + client.check_consistency(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.check_consistency(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_check_consistency_rest_required_fields( request_type=bigtable_table_admin.CheckConsistencyRequest, ): @@ -10842,6 +18814,46 @@ def test_snapshot_table_rest(request_type): assert response.operation.name == "operations/spam" +def test_snapshot_table_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.snapshot_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.snapshot_table] = mock_rpc + + request = {} + client.snapshot_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.snapshot_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_snapshot_table_rest_required_fields( request_type=bigtable_table_admin.SnapshotTableRequest, ): @@ -11141,6 +19153,42 @@ def test_get_snapshot_rest(request_type): assert response.description == "description_value" +def test_get_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_snapshot] = mock_rpc + + request = {} + client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_snapshot_rest_required_fields( request_type=bigtable_table_admin.GetSnapshotRequest, ): @@ -11404,13 +19452,49 @@ def test_list_snapshots_rest(request_type): return_value = bigtable_table_admin.ListSnapshotsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_snapshots(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_snapshots(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSnapshotsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_snapshots_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_snapshots in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_snapshots] = mock_rpc + + request = {} + client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_snapshots(request) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSnapshotsPager) - assert response.next_page_token == "next_page_token_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 def test_list_snapshots_rest_required_fields( @@ -11756,6 +19840,42 @@ def test_delete_snapshot_rest(request_type): assert response is None +def test_delete_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_snapshot] = mock_rpc + + request = {} + client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_snapshot_rest_required_fields( request_type=bigtable_table_admin.DeleteSnapshotRequest, ): @@ -12103,6 +20223,46 @@ def get_message_fields(field): assert response.operation.name == "operations/spam" +def test_create_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc + + request = {} + client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_backup_rest_required_fields( request_type=bigtable_table_admin.CreateBackupRequest, ): @@ -12411,6 +20571,42 @@ def test_get_backup_rest(request_type): assert response.state == table.Backup.State.CREATING +def test_get_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_backup_rest_required_fields( request_type=bigtable_table_admin.GetBackupRequest, ): @@ -12786,6 +20982,42 @@ def get_message_fields(field): assert response.state == table.Backup.State.CREATING +def test_update_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_backup_rest_required_fields( request_type=bigtable_table_admin.UpdateBackupRequest, ): @@ -13067,6 +21299,42 @@ def test_delete_backup_rest(request_type): assert response is None +def test_delete_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_backup_rest_required_fields( request_type=bigtable_table_admin.DeleteBackupRequest, ): @@ -13328,6 +21596,42 @@ def test_list_backups_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_backups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_backups_rest_required_fields( request_type=bigtable_table_admin.ListBackupsRequest, ): @@ -13673,6 +21977,46 @@ def test_restore_table_rest(request_type): assert response.operation.name == "operations/spam" +def test_restore_table_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restore_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.restore_table] = mock_rpc + + request = {} + client.restore_table(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_restore_table_rest_required_fields( request_type=bigtable_table_admin.RestoreTableRequest, ): @@ -13893,6 +22237,46 @@ def test_copy_backup_rest(request_type): assert response.operation.name == "operations/spam" +def test_copy_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.copy_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.copy_backup] = mock_rpc + + request = {} + client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.copy_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_copy_backup_rest_required_fields( request_type=bigtable_table_admin.CopyBackupRequest, ): @@ -14187,6 +22571,42 @@ def test_get_iam_policy_rest(request_type): assert response.etag == b"etag_blob" +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=iam_policy_pb2.GetIamPolicyRequest, ): @@ -14452,6 +22872,42 @@ def test_set_iam_policy_rest(request_type): assert response.etag == b"etag_blob" +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=iam_policy_pb2.SetIamPolicyRequest, ): @@ -14723,6 +23179,46 @@ def test_test_iam_permissions_rest(request_type): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableTableAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=iam_policy_pb2.TestIamPermissionsRequest, ): @@ -15110,6 +23606,11 @@ def test_bigtable_table_admin_base_transport(): "update_table", "delete_table", "undelete_table", + "create_authorized_view", + "list_authorized_views", + "get_authorized_view", + "update_authorized_view", + "delete_authorized_view", "modify_column_families", "drop_row_range", "generate_consistency_token", @@ -15459,6 +23960,21 @@ def test_bigtable_table_admin_client_transport_session_collision(transport_name) session1 = client1.transport.undelete_table._session session2 = client2.transport.undelete_table._session assert session1 != session2 + session1 = client1.transport.create_authorized_view._session + session2 = client2.transport.create_authorized_view._session + assert session1 != session2 + session1 = client1.transport.list_authorized_views._session + session2 = client2.transport.list_authorized_views._session + assert session1 != session2 + session1 = client1.transport.get_authorized_view._session + session2 = client2.transport.get_authorized_view._session + assert session1 != session2 + session1 = client1.transport.update_authorized_view._session + session2 = client2.transport.update_authorized_view._session + assert session1 != session2 + session1 = client1.transport.delete_authorized_view._session + session2 = client2.transport.delete_authorized_view._session + assert session1 != session2 session1 = client1.transport.modify_column_families._session session2 = client2.transport.modify_column_families._session assert session1 != session2 @@ -15675,11 +24191,42 @@ def test_bigtable_table_admin_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_backup_path(): +def test_authorized_view_path(): project = "squid" instance = "clam" - cluster = "whelk" - backup = "octopus" + table = "whelk" + authorized_view = "octopus" + expected = "projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}".format( + project=project, + instance=instance, + table=table, + authorized_view=authorized_view, + ) + actual = BigtableTableAdminClient.authorized_view_path( + project, instance, table, authorized_view + ) + assert expected == actual + + +def test_parse_authorized_view_path(): + expected = { + "project": "oyster", + "instance": "nudibranch", + "table": "cuttlefish", + "authorized_view": "mussel", + } + path = BigtableTableAdminClient.authorized_view_path(**expected) + + # Check that the path construction is reversible. 
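# --- Illustrative aside, not part of the generated diff --------------------
# The reversibility checked here is a consequence of how such path helpers
# are put together: a str.format template on the way in and a matching regex
# on the way out. Hypothetical standalone pair mirroring the
# projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}
# template exercised by these tests.
import re


def authorized_view_path(project, instance, table, authorized_view):
    return (
        "projects/{project}/instances/{instance}/tables/{table}/"
        "authorizedViews/{authorized_view}"
    ).format(
        project=project,
        instance=instance,
        table=table,
        authorized_view=authorized_view,
    )


def parse_authorized_view_path(path):
    m = re.match(
        r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)"
        r"/tables/(?P<table>.+?)/authorizedViews/(?P<authorized_view>.+?)$",
        path,
    )
    return m.groupdict() if m else {}


assert parse_authorized_view_path(
    authorized_view_path("oyster", "nudibranch", "cuttlefish", "mussel")
) == {
    "project": "oyster",
    "instance": "nudibranch",
    "table": "cuttlefish",
    "authorized_view": "mussel",
}
# ----------------------------------------------------------------------------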
+ actual = BigtableTableAdminClient.parse_authorized_view_path(path) + assert expected == actual + + +def test_backup_path(): + project = "winkle" + instance = "nautilus" + cluster = "scallop" + backup = "abalone" expected = "projects/{project}/instances/{instance}/clusters/{cluster}/backups/{backup}".format( project=project, instance=instance, @@ -15692,10 +24239,10 @@ def test_backup_path(): def test_parse_backup_path(): expected = { - "project": "oyster", - "instance": "nudibranch", - "cluster": "cuttlefish", - "backup": "mussel", + "project": "squid", + "instance": "clam", + "cluster": "whelk", + "backup": "octopus", } path = BigtableTableAdminClient.backup_path(**expected) @@ -15705,9 +24252,9 @@ def test_parse_backup_path(): def test_cluster_path(): - project = "winkle" - instance = "nautilus" - cluster = "scallop" + project = "oyster" + instance = "nudibranch" + cluster = "cuttlefish" expected = "projects/{project}/instances/{instance}/clusters/{cluster}".format( project=project, instance=instance, @@ -15719,9 +24266,9 @@ def test_cluster_path(): def test_parse_cluster_path(): expected = { - "project": "abalone", - "instance": "squid", - "cluster": "clam", + "project": "mussel", + "instance": "winkle", + "cluster": "nautilus", } path = BigtableTableAdminClient.cluster_path(**expected) @@ -15731,11 +24278,11 @@ def test_parse_cluster_path(): def test_crypto_key_version_path(): - project = "whelk" - location = "octopus" - key_ring = "oyster" - crypto_key = "nudibranch" - crypto_key_version = "cuttlefish" + project = "scallop" + location = "abalone" + key_ring = "squid" + crypto_key = "clam" + crypto_key_version = "whelk" expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( project=project, location=location, @@ -15751,11 +24298,11 @@ def test_crypto_key_version_path(): def test_parse_crypto_key_version_path(): expected = { - "project": "mussel", - "location": "winkle", - "key_ring": "nautilus", - "crypto_key": "scallop", - "crypto_key_version": "abalone", + "project": "octopus", + "location": "oyster", + "key_ring": "nudibranch", + "crypto_key": "cuttlefish", + "crypto_key_version": "mussel", } path = BigtableTableAdminClient.crypto_key_version_path(**expected) @@ -15765,8 +24312,8 @@ def test_parse_crypto_key_version_path(): def test_instance_path(): - project = "squid" - instance = "clam" + project = "winkle" + instance = "nautilus" expected = "projects/{project}/instances/{instance}".format( project=project, instance=instance, @@ -15777,8 +24324,8 @@ def test_instance_path(): def test_parse_instance_path(): expected = { - "project": "whelk", - "instance": "octopus", + "project": "scallop", + "instance": "abalone", } path = BigtableTableAdminClient.instance_path(**expected) @@ -15788,10 +24335,10 @@ def test_parse_instance_path(): def test_snapshot_path(): - project = "oyster" - instance = "nudibranch" - cluster = "cuttlefish" - snapshot = "mussel" + project = "squid" + instance = "clam" + cluster = "whelk" + snapshot = "octopus" expected = "projects/{project}/instances/{instance}/clusters/{cluster}/snapshots/{snapshot}".format( project=project, instance=instance, @@ -15806,10 +24353,10 @@ def test_snapshot_path(): def test_parse_snapshot_path(): expected = { - "project": "winkle", - "instance": "nautilus", - "cluster": "scallop", - "snapshot": "abalone", + "project": "oyster", + "instance": "nudibranch", + "cluster": "cuttlefish", + "snapshot": "mussel", } path = 
BigtableTableAdminClient.snapshot_path(**expected) @@ -15819,9 +24366,9 @@ def test_parse_snapshot_path(): def test_table_path(): - project = "squid" - instance = "clam" - table = "whelk" + project = "winkle" + instance = "nautilus" + table = "scallop" expected = "projects/{project}/instances/{instance}/tables/{table}".format( project=project, instance=instance, @@ -15833,9 +24380,9 @@ def test_table_path(): def test_parse_table_path(): expected = { - "project": "octopus", - "instance": "oyster", - "table": "nudibranch", + "project": "abalone", + "instance": "squid", + "table": "clam", } path = BigtableTableAdminClient.table_path(**expected) @@ -15845,7 +24392,7 @@ def test_parse_table_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -15855,7 +24402,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "octopus", } path = BigtableTableAdminClient.common_billing_account_path(**expected) @@ -15865,7 +24412,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -15875,7 +24422,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "nudibranch", } path = BigtableTableAdminClient.common_folder_path(**expected) @@ -15885,7 +24432,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -15895,7 +24442,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "mussel", } path = BigtableTableAdminClient.common_organization_path(**expected) @@ -15905,7 +24452,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -15915,7 +24462,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "nautilus", } path = BigtableTableAdminClient.common_project_path(**expected) @@ -15925,8 +24472,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -15937,8 +24484,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "squid", + "location": "clam", } path = BigtableTableAdminClient.common_location_path(**expected) diff --git a/tests/unit/gapic/bigtable_v2/__init__.py b/tests/unit/gapic/bigtable_v2/__init__.py index 89a37dc92..8f6cf0682 100644 --- a/tests/unit/gapic/bigtable_v2/__init__.py +++ b/tests/unit/gapic/bigtable_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
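# --- Illustrative aside, not part of the generated diff --------------------
# The data-plane tests in the next file add *_non_empty_request_with_auto_populated_field
# coverage. The AIP-4235 behavior they guard: a client may auto-fill a UUID4
# request-id style field only when the caller left it empty, and must pass
# through every field the caller set explicitly (table_name,
# authorized_view_name and app_profile_id are never auto-filled). Minimal
# model with a hypothetical field name:
import uuid


def autopopulate(request: dict, field: str = "request_id") -> dict:
    if not request.get(field):
        request = {**request, field: str(uuid.uuid4())}
    return request


req = autopopulate({"table_name": "projects/p/instances/i/tables/t"})
assert req["table_name"] == "projects/p/instances/i/tables/t"  # passed through
assert len(req["request_id"]) == 36  # auto-filled because it was left empty

explicit = autopopulate({"request_id": "caller-chosen"})
assert explicit["request_id"] == "caller-chosen"  # never overwritten
# ----------------------------------------------------------------------------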
diff --git a/tests/unit/gapic/bigtable_v2/test_bigtable.py b/tests/unit/gapic/bigtable_v2/test_bigtable.py index 105f9e49e..5a62b3dfa 100644 --- a/tests/unit/gapic/bigtable_v2/test_bigtable.py +++ b/tests/unit/gapic/bigtable_v2/test_bigtable.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -1099,7 +1099,8 @@ def test_read_rows(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.ReadRowsRequest() + request = bigtable.ReadRowsRequest() + assert args[0] == request # Establish that the response is the type that we expect. for message in response: @@ -1116,12 +1117,148 @@ def test_read_rows_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.read_rows), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.read_rows() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable.ReadRowsRequest() +def test_read_rows_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable.ReadRowsRequest( + table_name="table_name_value", + authorized_view_name="authorized_view_name_value", + app_profile_id="app_profile_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.read_rows), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.read_rows(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.ReadRowsRequest( + table_name="table_name_value", + authorized_view_name="authorized_view_name_value", + app_profile_id="app_profile_id_value", + ) + + +def test_read_rows_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.read_rows in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.read_rows] = mock_rpc + request = {} + client.read_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.read_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_read_rows_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.read_rows), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[bigtable.ReadRowsResponse()] + ) + response = await client.read_rows() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.ReadRowsRequest() + + +@pytest.mark.asyncio +async def test_read_rows_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.read_rows + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.read_rows + ] = mock_object + + request = {} + await client.read_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.read_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_read_rows_async( transport: str = "grpc_asyncio", request_type=bigtable.ReadRowsRequest @@ -1147,7 +1284,8 @@ async def test_read_rows_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.ReadRowsRequest() + request = bigtable.ReadRowsRequest() + assert args[0] == request # Establish that the response is the type that we expect. message = await response.read() @@ -1200,6 +1338,27 @@ def test_read_rows_routing_parameters(): _, _, kw = call.mock_calls[0] # This test doesn't assert anything useful. assert kw["metadata"] + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable.ReadRowsRequest( + **{ + "authorized_view_name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
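# --- Illustrative aside, not part of the generated diff --------------------
# The routing-parameters tests only assert that kw["metadata"] is non-empty,
# but what they are probing is the request-params routing header: path-style
# request fields such as authorized_view_name are echoed back to the server
# as "x-goog-request-params" metadata. The helper below is hypothetical; the
# real extraction rules come from the routing annotations in the Bigtable
# protos.
from urllib.parse import quote


def routing_header(**params):
    # Percent-encode each populated routing field into a single header value.
    value = "&".join(f"{k}={quote(v, safe='')}" for k, v in params.items() if v)
    return [("x-goog-request-params", value)]


hdrs = routing_header(
    authorized_view_name=(
        "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4"
    )
)
assert hdrs[0][0] == "x-goog-request-params"
assert "authorizedViews%2Fsample4" in hdrs[0][1]
# ----------------------------------------------------------------------------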
+ with mock.patch.object(type(client.transport.read_rows), "__call__") as call: + call.return_value = iter([bigtable.ReadRowsResponse()]) + client.read_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_read_rows_flattened(): @@ -1318,7 +1477,8 @@ def test_sample_row_keys(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.SampleRowKeysRequest() + request = bigtable.SampleRowKeysRequest() + assert args[0] == request # Establish that the response is the type that we expect. for message in response: @@ -1335,12 +1495,150 @@ def test_sample_row_keys_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.sample_row_keys), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.sample_row_keys() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable.SampleRowKeysRequest() +def test_sample_row_keys_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable.SampleRowKeysRequest( + table_name="table_name_value", + authorized_view_name="authorized_view_name_value", + app_profile_id="app_profile_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.sample_row_keys), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.sample_row_keys(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.SampleRowKeysRequest( + table_name="table_name_value", + authorized_view_name="authorized_view_name_value", + app_profile_id="app_profile_id_value", + ) + + +def test_sample_row_keys_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.sample_row_keys in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.sample_row_keys] = mock_rpc + request = {} + client.sample_row_keys(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.sample_row_keys(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_sample_row_keys_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.sample_row_keys), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[bigtable.SampleRowKeysResponse()] + ) + response = await client.sample_row_keys() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.SampleRowKeysRequest() + + +@pytest.mark.asyncio +async def test_sample_row_keys_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.sample_row_keys + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.sample_row_keys + ] = mock_object + + request = {} + await client.sample_row_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.sample_row_keys(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_sample_row_keys_async( transport: str = "grpc_asyncio", request_type=bigtable.SampleRowKeysRequest @@ -1366,7 +1664,8 @@ async def test_sample_row_keys_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.SampleRowKeysRequest() + request = bigtable.SampleRowKeysRequest() + assert args[0] == request # Establish that the response is the type that we expect. message = await response.read() @@ -1419,6 +1718,27 @@ def test_sample_row_keys_routing_parameters(): _, _, kw = call.mock_calls[0] # This test doesn't assert anything useful. assert kw["metadata"] + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable.SampleRowKeysRequest( + **{ + "authorized_view_name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.sample_row_keys), "__call__") as call: + call.return_value = iter([bigtable.SampleRowKeysResponse()]) + client.sample_row_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_sample_row_keys_flattened(): @@ -1537,7 +1857,8 @@ def test_mutate_row(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.MutateRowRequest() + request = bigtable.MutateRowRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, bigtable.MutateRowResponse) @@ -1553,12 +1874,147 @@ def test_mutate_row_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.mutate_row), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.mutate_row() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable.MutateRowRequest() +def test_mutate_row_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable.MutateRowRequest( + table_name="table_name_value", + authorized_view_name="authorized_view_name_value", + app_profile_id="app_profile_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mutate_row), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.mutate_row(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.MutateRowRequest( + table_name="table_name_value", + authorized_view_name="authorized_view_name_value", + app_profile_id="app_profile_id_value", + ) + + +def test_mutate_row_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.mutate_row in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.mutate_row] = mock_rpc + request = {} + client.mutate_row(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.mutate_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_mutate_row_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mutate_row), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable.MutateRowResponse() + ) + response = await client.mutate_row() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.MutateRowRequest() + + +@pytest.mark.asyncio +async def test_mutate_row_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.mutate_row + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.mutate_row + ] = mock_object + + request = {} + await client.mutate_row(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.mutate_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_mutate_row_async( transport: str = "grpc_asyncio", request_type=bigtable.MutateRowRequest @@ -1583,7 +2039,8 @@ async def test_mutate_row_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.MutateRowRequest() + request = bigtable.MutateRowRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, bigtable.MutateRowResponse) @@ -1635,6 +2092,27 @@ def test_mutate_row_routing_parameters(): _, _, kw = call.mock_calls[0] # This test doesn't assert anything useful. assert kw["metadata"] + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable.MutateRowRequest( + **{ + "authorized_view_name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mutate_row), "__call__") as call: + call.return_value = bigtable.MutateRowResponse() + client.mutate_row(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_mutate_row_flattened(): @@ -1799,7 +2277,8 @@ def test_mutate_rows(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.MutateRowsRequest() + request = bigtable.MutateRowsRequest() + assert args[0] == request # Establish that the response is the type that we expect. for message in response: @@ -1816,12 +2295,150 @@ def test_mutate_rows_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.mutate_rows), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.mutate_rows() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable.MutateRowsRequest() +def test_mutate_rows_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable.MutateRowsRequest( + table_name="table_name_value", + authorized_view_name="authorized_view_name_value", + app_profile_id="app_profile_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mutate_rows), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.mutate_rows(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.MutateRowsRequest( + table_name="table_name_value", + authorized_view_name="authorized_view_name_value", + app_profile_id="app_profile_id_value", + ) + + +def test_mutate_rows_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.mutate_rows in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.mutate_rows] = mock_rpc + request = {} + client.mutate_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.mutate_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_mutate_rows_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.mutate_rows), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[bigtable.MutateRowsResponse()] + ) + response = await client.mutate_rows() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.MutateRowsRequest() + + +@pytest.mark.asyncio +async def test_mutate_rows_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.mutate_rows + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.mutate_rows + ] = mock_object + + request = {} + await client.mutate_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.mutate_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_mutate_rows_async( transport: str = "grpc_asyncio", request_type=bigtable.MutateRowsRequest @@ -1847,7 +2464,8 @@ async def test_mutate_rows_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.MutateRowsRequest() + request = bigtable.MutateRowsRequest() + assert args[0] == request # Establish that the response is the type that we expect. message = await response.read() @@ -1900,6 +2518,27 @@ def test_mutate_rows_routing_parameters(): _, _, kw = call.mock_calls[0] # This test doesn't assert anything useful. assert kw["metadata"] + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable.MutateRowsRequest( + **{ + "authorized_view_name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
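# --- Illustrative aside, not part of the generated diff --------------------
# The *_async_use_cached_wrapped_rpc tests above replace the cached wrapper
# with an AwaitableMock whose __await__ returns an empty iterator: awaiting
# it therefore completes immediately with None while the mock still records
# how often it was called and awaited. Self-contained demo of that trick:
import asyncio
from unittest import mock


class AwaitableMock(mock.AsyncMock):
    def __await__(self):
        self.await_count += 1
        return iter([])


async def demo():
    m = AwaitableMock()
    result = await m  # resolves immediately; no real RPC machinery involved
    assert result is None
    assert m.await_count == 1


asyncio.run(demo())
# ----------------------------------------------------------------------------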
+ with mock.patch.object(type(client.transport.mutate_rows), "__call__") as call: + call.return_value = iter([bigtable.MutateRowsResponse()]) + client.mutate_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_mutate_rows_flattened(): @@ -2015,48 +2654,196 @@ def test_check_and_mutate_row(request_type, transport: str = "grpc"): transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_and_mutate_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = bigtable.CheckAndMutateRowResponse( + predicate_matched=True, + ) + response = client.check_and_mutate_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = bigtable.CheckAndMutateRowRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, bigtable.CheckAndMutateRowResponse) + assert response.predicate_matched is True + + +def test_check_and_mutate_row_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_and_mutate_row), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.check_and_mutate_row() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.CheckAndMutateRowRequest() + + +def test_check_and_mutate_row_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable.CheckAndMutateRowRequest( + table_name="table_name_value", + authorized_view_name="authorized_view_name_value", + app_profile_id="app_profile_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_and_mutate_row), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.check_and_mutate_row(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.CheckAndMutateRowRequest( + table_name="table_name_value", + authorized_view_name="authorized_view_name_value", + app_profile_id="app_profile_id_value", + ) + + +def test_check_and_mutate_row_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.check_and_mutate_row), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = bigtable.CheckAndMutateRowResponse( - predicate_matched=True, + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.check_and_mutate_row in client._transport._wrapped_methods ) - response = client.check_and_mutate_row(request) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.check_and_mutate_row + ] = mock_rpc + request = {} + client.check_and_mutate_row(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.CheckAndMutateRowRequest() + assert mock_rpc.call_count == 1 - # Establish that the response is the type that we expect. - assert isinstance(response, bigtable.CheckAndMutateRowResponse) - assert response.predicate_matched is True + client.check_and_mutate_row(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_check_and_mutate_row_empty_call(): + +@pytest.mark.asyncio +async def test_check_and_mutate_row_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = BigtableClient( + client = BigtableAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.check_and_mutate_row), "__call__" ) as call: - client.check_and_mutate_row() + # Designate an appropriate return value for the call. 
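# --- Illustrative aside, not part of the generated diff --------------------
# For context on the predicate_matched assertions in this block:
# CheckAndMutateRow applies true_mutations when the predicate filter matches
# at least one cell of the row and false_mutations otherwise, and
# predicate_matched reports which branch ran. Toy model with hypothetical
# names, not the service implementation:
def check_and_mutate(row_cells, predicate, true_mutations, false_mutations):
    matched = any(predicate(cell) for cell in row_cells)
    applied = true_mutations if matched else false_mutations
    return matched, applied


matched, applied = check_and_mutate(
    row_cells=[{"family": "cf", "qualifier": b"q", "value": b"v"}],
    predicate=lambda cell: cell["value"] == b"v",
    true_mutations=["set_cell"],
    false_mutations=["delete_from_row"],
)
assert matched is True
assert applied == ["set_cell"]
# ----------------------------------------------------------------------------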
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable.CheckAndMutateRowResponse( + predicate_matched=True, + ) + ) + response = await client.check_and_mutate_row() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable.CheckAndMutateRowRequest() +@pytest.mark.asyncio +async def test_check_and_mutate_row_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.check_and_mutate_row + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.check_and_mutate_row + ] = mock_object + + request = {} + await client.check_and_mutate_row(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.check_and_mutate_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_check_and_mutate_row_async( transport: str = "grpc_asyncio", request_type=bigtable.CheckAndMutateRowRequest @@ -2085,7 +2872,8 @@ async def test_check_and_mutate_row_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.CheckAndMutateRowRequest() + request = bigtable.CheckAndMutateRowRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, bigtable.CheckAndMutateRowResponse) @@ -2142,6 +2930,29 @@ def test_check_and_mutate_row_routing_parameters(): _, _, kw = call.mock_calls[0] # This test doesn't assert anything useful. assert kw["metadata"] + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable.CheckAndMutateRowRequest( + **{ + "authorized_view_name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_and_mutate_row), "__call__" + ) as call: + call.return_value = bigtable.CheckAndMutateRowResponse() + client.check_and_mutate_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw["metadata"] def test_check_and_mutate_row_flattened(): @@ -2410,7 +3221,8 @@ def test_ping_and_warm(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
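The async *_use_cached_wrapped_rpc tests above rely on a small AwaitableMock helper: an AsyncMock whose __await__ yields nothing, so awaiting it completes immediately while still bumping await_count. A standalone sketch of just that trick, independent of the Bigtable client:

import asyncio
from unittest import mock


class AwaitableMock(mock.AsyncMock):
    def __await__(self):
        # Count the await and finish immediately with a result of None.
        self.await_count += 1
        return iter([])


async def main():
    rpc = AwaitableMock()
    await rpc                  # the mock object itself is awaitable
    assert rpc.await_count == 1


asyncio.run(main())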
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.PingAndWarmRequest() + request = bigtable.PingAndWarmRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, bigtable.PingAndWarmResponse) @@ -2426,12 +3238,147 @@ def test_ping_and_warm_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.ping_and_warm), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.ping_and_warm() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable.PingAndWarmRequest() +def test_ping_and_warm_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable.PingAndWarmRequest( + name="name_value", + app_profile_id="app_profile_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.ping_and_warm), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.ping_and_warm(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.PingAndWarmRequest( + name="name_value", + app_profile_id="app_profile_id_value", + ) + + +def test_ping_and_warm_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.ping_and_warm in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.ping_and_warm] = mock_rpc + request = {} + client.ping_and_warm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.ping_and_warm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_ping_and_warm_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.ping_and_warm), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable.PingAndWarmResponse() + ) + response = await client.ping_and_warm() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.PingAndWarmRequest() + + +@pytest.mark.asyncio +async def test_ping_and_warm_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.ping_and_warm + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.ping_and_warm + ] = mock_object + + request = {} + await client.ping_and_warm(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.ping_and_warm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_ping_and_warm_async( transport: str = "grpc_asyncio", request_type=bigtable.PingAndWarmRequest @@ -2456,7 +3403,8 @@ async def test_ping_and_warm_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.PingAndWarmRequest() + request = bigtable.PingAndWarmRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, bigtable.PingAndWarmResponse) @@ -2630,7 +3578,8 @@ def test_read_modify_write_row(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.ReadModifyWriteRowRequest() + request = bigtable.ReadModifyWriteRowRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, bigtable.ReadModifyWriteRowResponse) @@ -2648,12 +3597,158 @@ def test_read_modify_write_row_empty_call(): with mock.patch.object( type(client.transport.read_modify_write_row), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.read_modify_write_row() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable.ReadModifyWriteRowRequest() +def test_read_modify_write_row_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = bigtable.ReadModifyWriteRowRequest( + table_name="table_name_value", + authorized_view_name="authorized_view_name_value", + app_profile_id="app_profile_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read_modify_write_row), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.read_modify_write_row(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.ReadModifyWriteRowRequest( + table_name="table_name_value", + authorized_view_name="authorized_view_name_value", + app_profile_id="app_profile_id_value", + ) + + +def test_read_modify_write_row_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.read_modify_write_row + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.read_modify_write_row + ] = mock_rpc + request = {} + client.read_modify_write_row(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.read_modify_write_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_read_modify_write_row_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read_modify_write_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + bigtable.ReadModifyWriteRowResponse() + ) + response = await client.read_modify_write_row() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.ReadModifyWriteRowRequest() + + +@pytest.mark.asyncio +async def test_read_modify_write_row_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.read_modify_write_row + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.read_modify_write_row + ] = mock_object + + request = {} + await client.read_modify_write_row(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.read_modify_write_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_read_modify_write_row_async( transport: str = "grpc_asyncio", request_type=bigtable.ReadModifyWriteRowRequest @@ -2680,7 +3775,8 @@ async def test_read_modify_write_row_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.ReadModifyWriteRowRequest() + request = bigtable.ReadModifyWriteRowRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, bigtable.ReadModifyWriteRowResponse) @@ -2698,9 +3794,28 @@ def test_read_modify_write_row_routing_parameters(): # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable.ReadModifyWriteRowRequest( - **{"table_name": "projects/sample1/instances/sample2/tables/sample3"} - ) + request = bigtable.ReadModifyWriteRowRequest( + **{"table_name": "projects/sample1/instances/sample2/tables/sample3"} + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read_modify_write_row), "__call__" + ) as call: + call.return_value = bigtable.ReadModifyWriteRowResponse() + client.read_modify_write_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw["metadata"] + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = bigtable.ReadModifyWriteRowRequest(**{"app_profile_id": "sample1"}) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2719,7 +3834,11 @@ def test_read_modify_write_row_routing_parameters(): assert kw["metadata"] # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = bigtable.ReadModifyWriteRowRequest(**{"app_profile_id": "sample1"}) + request = bigtable.ReadModifyWriteRowRequest( + **{ + "authorized_view_name": "projects/sample1/instances/sample2/tables/sample3/authorizedViews/sample4" + } + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2886,7 +4005,8 @@ def test_generate_initial_change_stream_partitions( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.GenerateInitialChangeStreamPartitionsRequest() + request = bigtable.GenerateInitialChangeStreamPartitionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. for message in response: @@ -2907,12 +4027,157 @@ def test_generate_initial_change_stream_partitions_empty_call(): with mock.patch.object( type(client.transport.generate_initial_change_stream_partitions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.generate_initial_change_stream_partitions() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable.GenerateInitialChangeStreamPartitionsRequest() +def test_generate_initial_change_stream_partitions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable.GenerateInitialChangeStreamPartitionsRequest( + table_name="table_name_value", + app_profile_id="app_profile_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_initial_change_stream_partitions), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
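The new routing-parameter cases only assert that kw["metadata"] is populated, but the intent is that the resource name taken from the request (table_name, app_profile_id, or the new authorized_view_name) is forwarded as a request-params style metadata header. A rough sketch of that encoding, built by hand here rather than by the generated client:

from urllib.parse import urlencode


def routing_metadata(**params):
    # Illustrative: percent-encode the routing fields into a single
    # x-goog-request-params metadata entry.
    return [("x-goog-request-params", urlencode(params))]


md = routing_metadata(
    authorized_view_name=(
        "projects/sample1/instances/sample2/tables/sample3/"
        "authorizedViews/sample4"
    )
)
assert md[0][0] == "x-goog-request-params"
assert "authorizedViews%2Fsample4" in md[0][1]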
+ ) + client.generate_initial_change_stream_partitions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.GenerateInitialChangeStreamPartitionsRequest( + table_name="table_name_value", + app_profile_id="app_profile_id_value", + ) + + +def test_generate_initial_change_stream_partitions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.generate_initial_change_stream_partitions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_initial_change_stream_partitions + ] = mock_rpc + request = {} + client.generate_initial_change_stream_partitions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_initial_change_stream_partitions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_initial_change_stream_partitions_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_initial_change_stream_partitions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[bigtable.GenerateInitialChangeStreamPartitionsResponse()] + ) + response = await client.generate_initial_change_stream_partitions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.GenerateInitialChangeStreamPartitionsRequest() + + +@pytest.mark.asyncio +async def test_generate_initial_change_stream_partitions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.generate_initial_change_stream_partitions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.generate_initial_change_stream_partitions + ] = mock_object + + request = {} + await client.generate_initial_change_stream_partitions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.generate_initial_change_stream_partitions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_generate_initial_change_stream_partitions_async( transport: str = "grpc_asyncio", @@ -2941,7 +4206,8 @@ async def test_generate_initial_change_stream_partitions_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.GenerateInitialChangeStreamPartitionsRequest() + request = bigtable.GenerateInitialChangeStreamPartitionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. message = await response.read() @@ -3147,7 +4413,8 @@ def test_read_change_stream(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.ReadChangeStreamRequest() + request = bigtable.ReadChangeStreamRequest() + assert args[0] == request # Establish that the response is the type that we expect. for message in response: @@ -3166,12 +4433,156 @@ def test_read_change_stream_empty_call(): with mock.patch.object( type(client.transport.read_change_stream), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.read_change_stream() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == bigtable.ReadChangeStreamRequest() +def test_read_change_stream_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = bigtable.ReadChangeStreamRequest( + table_name="table_name_value", + app_profile_id="app_profile_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read_change_stream), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.read_change_stream(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.ReadChangeStreamRequest( + table_name="table_name_value", + app_profile_id="app_profile_id_value", + ) + + +def test_read_change_stream_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.read_change_stream in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.read_change_stream + ] = mock_rpc + request = {} + client.read_change_stream(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.read_change_stream(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_read_change_stream_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read_change_stream), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[bigtable.ReadChangeStreamResponse()] + ) + response = await client.read_change_stream() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == bigtable.ReadChangeStreamRequest() + + +@pytest.mark.asyncio +async def test_read_change_stream_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BigtableAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.read_change_stream + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[ + client._client._transport.read_change_stream + ] = mock_object + + request = {} + await client.read_change_stream(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.read_change_stream(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_read_change_stream_async( transport: str = "grpc_asyncio", request_type=bigtable.ReadChangeStreamRequest @@ -3199,7 +4610,8 @@ async def test_read_change_stream_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == bigtable.ReadChangeStreamRequest() + request = bigtable.ReadChangeStreamRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
message = await response.read() @@ -3418,92 +4830,40 @@ def test_read_rows_rest(request_type): assert response.last_scanned_row_key == b"last_scanned_row_key_blob" -def test_read_rows_rest_required_fields(request_type=bigtable.ReadRowsRequest): - transport_class = transports.BigtableRestTransport - - request_init = {} - request_init["table_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - use_integers_for_enums=False, +def test_read_rows_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).read_rows._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["tableName"] = "table_name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).read_rows._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "tableName" in jsonified_request - assert jsonified_request["tableName"] == "table_name_value" - - client = BigtableClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = bigtable.ReadRowsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = bigtable.ReadRowsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Ensure method has been cached + assert client._transport.read_rows in client._transport._wrapped_methods - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.read_rows(request) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.read_rows] = mock_rpc - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + request = {} + client.read_rows(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_read_rows_rest_unset_required_fields(): - transport = transports.BigtableRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + client.read_rows(request) - unset_fields = transport.read_rows._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("tableName",))) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -3703,95 +5063,40 @@ def test_sample_row_keys_rest(request_type): assert response.offset_bytes == 1293 -def test_sample_row_keys_rest_required_fields( - request_type=bigtable.SampleRowKeysRequest, -): - transport_class = transports.BigtableRestTransport - - request_init = {} - request_init["table_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - use_integers_for_enums=False, +def test_sample_row_keys_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).sample_row_keys._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["tableName"] = "table_name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).sample_row_keys._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("app_profile_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "tableName" in jsonified_request - assert jsonified_request["tableName"] == "table_name_value" - - client = BigtableClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = bigtable.SampleRowKeysResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = bigtable.SampleRowKeysResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Ensure method has been cached + assert client._transport.sample_row_keys in client._transport._wrapped_methods - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.sample_row_keys(request) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.sample_row_keys] = mock_rpc - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + request = {} + client.sample_row_keys(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_sample_row_keys_rest_unset_required_fields(): - transport = transports.BigtableRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + client.sample_row_keys(request) - unset_fields = transport.sample_row_keys._get_unset_required_fields({}) - assert set(unset_fields) == (set(("appProfileId",)) & set(("tableName",))) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -3979,11 +5284,46 @@ def test_mutate_row_rest(request_type): assert isinstance(response, bigtable.MutateRowResponse) +def test_mutate_row_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.mutate_row in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.mutate_row] = mock_rpc + + request = {} + client.mutate_row(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.mutate_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_mutate_row_rest_required_fields(request_type=bigtable.MutateRowRequest): transport_class = transports.BigtableRestTransport request_init = {} - request_init["table_name"] = "" request_init["row_key"] = b"" request = request_type(**request_init) pb_request = request_type.pb(request) @@ -4003,7 +5343,6 @@ def test_mutate_row_rest_required_fields(request_type=bigtable.MutateRowRequest) # verify required fields with default values are now present - jsonified_request["tableName"] = "table_name_value" jsonified_request["rowKey"] = b"row_key_blob" unset_fields = transport_class( @@ -4012,8 +5351,6 @@ def test_mutate_row_rest_required_fields(request_type=bigtable.MutateRowRequest) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "tableName" in jsonified_request - assert jsonified_request["tableName"] == "table_name_value" assert "rowKey" in jsonified_request assert jsonified_request["rowKey"] == b"row_key_blob" @@ -4069,7 +5406,6 @@ def test_mutate_row_rest_unset_required_fields(): set(()) & set( ( - "tableName", "rowKey", "mutations", ) @@ -4277,11 +5613,46 @@ def test_mutate_rows_rest(request_type): assert isinstance(response, bigtable.MutateRowsResponse) +def test_mutate_rows_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.mutate_rows in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.mutate_rows] = mock_rpc + + request = {} + client.mutate_rows(request) + + # Establish that the underlying gRPC stub method was called. 
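The tableName removals in the REST required-field tests track the authorized-view support added elsewhere in this change: a data-plane request may now target either a table or an authorized view, so table_name is no longer treated as unconditionally required. A hedged sketch of that either/or check; the function name and dict shape are illustrative, not the transport's real _get_unset_required_fields logic.

def unset_required_fields(request):
    # Illustrative only: rowKey stays required, while the target resource may
    # be supplied as either tableName or authorizedViewName.
    missing = set()
    if not request.get("rowKey"):
        missing.add("rowKey")
    if not (request.get("tableName") or request.get("authorizedViewName")):
        missing.add("tableName/authorizedViewName")
    return missing


assert unset_required_fields({"rowKey": b"k", "tableName": "t"}) == set()
assert unset_required_fields({"rowKey": b"k", "authorizedViewName": "av"}) == set()
assert "rowKey" in unset_required_fields({"tableName": "t"})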
+ assert mock_rpc.call_count == 1 + + client.mutate_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_mutate_rows_rest_required_fields(request_type=bigtable.MutateRowsRequest): transport_class = transports.BigtableRestTransport request_init = {} - request_init["table_name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4300,16 +5671,12 @@ def test_mutate_rows_rest_required_fields(request_type=bigtable.MutateRowsReques # verify required fields with default values are now present - jsonified_request["tableName"] = "table_name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).mutate_rows._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "tableName" in jsonified_request - assert jsonified_request["tableName"] == "table_name_value" client = BigtableClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4362,15 +5729,7 @@ def test_mutate_rows_rest_unset_required_fields(): ) unset_fields = transport.mutate_rows._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "tableName", - "entries", - ) - ) - ) + assert set(unset_fields) == (set(()) & set(("entries",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -4563,13 +5922,52 @@ def test_check_and_mutate_row_rest(request_type): assert response.predicate_matched is True +def test_check_and_mutate_row_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.check_and_mutate_row in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.check_and_mutate_row + ] = mock_rpc + + request = {} + client.check_and_mutate_row(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.check_and_mutate_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_check_and_mutate_row_rest_required_fields( request_type=bigtable.CheckAndMutateRowRequest, ): transport_class = transports.BigtableRestTransport request_init = {} - request_init["table_name"] = "" request_init["row_key"] = b"" request = request_type(**request_init) pb_request = request_type.pb(request) @@ -4589,7 +5987,6 @@ def test_check_and_mutate_row_rest_required_fields( # verify required fields with default values are now present - jsonified_request["tableName"] = "table_name_value" jsonified_request["rowKey"] = b"row_key_blob" unset_fields = transport_class( @@ -4598,8 +5995,6 @@ def test_check_and_mutate_row_rest_required_fields( jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "tableName" in jsonified_request - assert jsonified_request["tableName"] == "table_name_value" assert "rowKey" in jsonified_request assert jsonified_request["rowKey"] == b"row_key_blob" @@ -4651,15 +6046,7 @@ def test_check_and_mutate_row_rest_unset_required_fields(): ) unset_fields = transport.check_and_mutate_row._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "tableName", - "rowKey", - ) - ) - ) + assert set(unset_fields) == (set(()) & set(("rowKey",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -4889,6 +6276,42 @@ def test_ping_and_warm_rest(request_type): assert isinstance(response, bigtable.PingAndWarmResponse) +def test_ping_and_warm_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.ping_and_warm in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.ping_and_warm] = mock_rpc + + request = {} + client.ping_and_warm(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.ping_and_warm(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_ping_and_warm_rest_required_fields(request_type=bigtable.PingAndWarmRequest): transport_class = transports.BigtableRestTransport @@ -5151,13 +6574,53 @@ def test_read_modify_write_row_rest(request_type): assert isinstance(response, bigtable.ReadModifyWriteRowResponse) +def test_read_modify_write_row_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.read_modify_write_row + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.read_modify_write_row + ] = mock_rpc + + request = {} + client.read_modify_write_row(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.read_modify_write_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_read_modify_write_row_rest_required_fields( request_type=bigtable.ReadModifyWriteRowRequest, ): transport_class = transports.BigtableRestTransport request_init = {} - request_init["table_name"] = "" request_init["row_key"] = b"" request = request_type(**request_init) pb_request = request_type.pb(request) @@ -5177,7 +6640,6 @@ def test_read_modify_write_row_rest_required_fields( # verify required fields with default values are now present - jsonified_request["tableName"] = "table_name_value" jsonified_request["rowKey"] = b"row_key_blob" unset_fields = transport_class( @@ -5186,8 +6648,6 @@ def test_read_modify_write_row_rest_required_fields( jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "tableName" in jsonified_request - assert jsonified_request["tableName"] == "table_name_value" assert "rowKey" in jsonified_request assert jsonified_request["rowKey"] == b"row_key_blob" @@ -5243,7 +6703,6 @@ def test_read_modify_write_row_rest_unset_required_fields(): set(()) & set( ( - "tableName", "rowKey", "rules", ) @@ -5447,6 +6906,47 @@ def test_generate_initial_change_stream_partitions_rest(request_type): assert isinstance(response, bigtable.GenerateInitialChangeStreamPartitionsResponse) +def test_generate_initial_change_stream_partitions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + 
client._transport.generate_initial_change_stream_partitions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_initial_change_stream_partitions + ] = mock_rpc + + request = {} + client.generate_initial_change_stream_partitions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_initial_change_stream_partitions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_generate_initial_change_stream_partitions_rest_required_fields( request_type=bigtable.GenerateInitialChangeStreamPartitionsRequest, ): @@ -5750,6 +7250,46 @@ def test_read_change_stream_rest(request_type): assert isinstance(response, bigtable.ReadChangeStreamResponse) +def test_read_change_stream_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BigtableClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.read_change_stream in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.read_change_stream + ] = mock_rpc + + request = {} + client.read_change_stream(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.read_change_stream(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_read_change_stream_rest_required_fields( request_type=bigtable.ReadChangeStreamRequest, ): @@ -6568,9 +8108,40 @@ def test_bigtable_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel -def test_instance_path(): +def test_authorized_view_path(): project = "squid" instance = "clam" + table = "whelk" + authorized_view = "octopus" + expected = "projects/{project}/instances/{instance}/tables/{table}/authorizedViews/{authorized_view}".format( + project=project, + instance=instance, + table=table, + authorized_view=authorized_view, + ) + actual = BigtableClient.authorized_view_path( + project, instance, table, authorized_view + ) + assert expected == actual + + +def test_parse_authorized_view_path(): + expected = { + "project": "oyster", + "instance": "nudibranch", + "table": "cuttlefish", + "authorized_view": "mussel", + } + path = BigtableClient.authorized_view_path(**expected) + + # Check that the path construction is reversible. 
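The new path-helper tests exercise a format/parse pair for authorized view resource names. A small self-contained sketch of that round trip, using str.format plus a named-group regex; the generated client's helpers may differ in detail:

import re

TEMPLATE = (
    "projects/{project}/instances/{instance}"
    "/tables/{table}/authorizedViews/{authorized_view}"
)
PATTERN = re.compile(
    r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)"
    r"/tables/(?P<table>.+?)/authorizedViews/(?P<authorized_view>.+?)$"
)


def authorized_view_path(project, instance, table, authorized_view):
    return TEMPLATE.format(
        project=project,
        instance=instance,
        table=table,
        authorized_view=authorized_view,
    )


def parse_authorized_view_path(path):
    m = PATTERN.match(path)
    return m.groupdict() if m else {}


expected = {
    "project": "oyster",
    "instance": "nudibranch",
    "table": "cuttlefish",
    "authorized_view": "mussel",
}
assert parse_authorized_view_path(authorized_view_path(**expected)) == expected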
+ actual = BigtableClient.parse_authorized_view_path(path) + assert expected == actual + + +def test_instance_path(): + project = "winkle" + instance = "nautilus" expected = "projects/{project}/instances/{instance}".format( project=project, instance=instance, @@ -6581,8 +8152,8 @@ def test_instance_path(): def test_parse_instance_path(): expected = { - "project": "whelk", - "instance": "octopus", + "project": "scallop", + "instance": "abalone", } path = BigtableClient.instance_path(**expected) @@ -6592,9 +8163,9 @@ def test_parse_instance_path(): def test_table_path(): - project = "oyster" - instance = "nudibranch" - table = "cuttlefish" + project = "squid" + instance = "clam" + table = "whelk" expected = "projects/{project}/instances/{instance}/tables/{table}".format( project=project, instance=instance, @@ -6606,9 +8177,9 @@ def test_table_path(): def test_parse_table_path(): expected = { - "project": "mussel", - "instance": "winkle", - "table": "nautilus", + "project": "octopus", + "instance": "oyster", + "table": "nudibranch", } path = BigtableClient.table_path(**expected) @@ -6618,7 +8189,7 @@ def test_parse_table_path(): def test_common_billing_account_path(): - billing_account = "scallop" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6628,7 +8199,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "abalone", + "billing_account": "mussel", } path = BigtableClient.common_billing_account_path(**expected) @@ -6638,7 +8209,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "squid" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -6648,7 +8219,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "clam", + "folder": "nautilus", } path = BigtableClient.common_folder_path(**expected) @@ -6658,7 +8229,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "whelk" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -6668,7 +8239,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "octopus", + "organization": "abalone", } path = BigtableClient.common_organization_path(**expected) @@ -6678,7 +8249,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "oyster" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -6688,7 +8259,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nudibranch", + "project": "clam", } path = BigtableClient.common_project_path(**expected) @@ -6698,8 +8269,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "cuttlefish" - location = "mussel" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -6710,8 +8281,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "winkle", - "location": "nautilus", + "project": "oyster", + "location": "nudibranch", } path = BigtableClient.common_location_path(**expected) diff --git a/tests/unit/v2_client/test_client.py b/tests/unit/v2_client/test_client.py index b6eb6ac96..4338f8553 100644 --- a/tests/unit/v2_client/test_client.py 
+++ b/tests/unit/v2_client/test_client.py @@ -173,10 +173,13 @@ def test_client_constructor_w_emulator_host(): from google.cloud.environment_vars import BIGTABLE_EMULATOR from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS + import grpc emulator_host = "localhost:8081" with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): - with mock.patch("grpc.insecure_channel") as factory: + channel = grpc.insecure_channel("no-host") + with mock.patch("grpc.insecure_channel", return_value=channel) as factory: + factory.return_value = channel client = _make_client() # don't test local_composite_credentials # client._local_composite_credentials = lambda: credentials @@ -195,10 +198,12 @@ def test_client_constructor_w_emulator_host(): def test_client_constructor_w_emulator_host_w_project(): from google.cloud.environment_vars import BIGTABLE_EMULATOR from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS + import grpc emulator_host = "localhost:8081" with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): - with mock.patch("grpc.insecure_channel") as factory: + channel = grpc.insecure_channel("no-host") + with mock.patch("grpc.insecure_channel", return_value=channel) as factory: client = _make_client(project=PROJECT) # channels are formed when needed, so access a client # create a gapic channel @@ -216,11 +221,13 @@ def test_client_constructor_w_emulator_host_w_credentials(): from google.cloud.environment_vars import BIGTABLE_EMULATOR from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS + import grpc emulator_host = "localhost:8081" credentials = _make_credentials() with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): - with mock.patch("grpc.insecure_channel") as factory: + channel = grpc.insecure_channel("no-host") + with mock.patch("grpc.insecure_channel", return_value=channel) as factory: client = _make_client(credentials=credentials) # channels are formed when needed, so access a client # create a gapic channel
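The emulator-host tests now patch grpc.insecure_channel so that the mock hands back a real, never-connected channel instead of a bare MagicMock, presumably so that downstream code expecting an actual grpc.Channel keeps working. A trimmed-down sketch of that pattern in isolation (the "no-host" target is never dialed, because gRPC channels connect lazily):

from unittest import mock

import grpc

channel = grpc.insecure_channel("no-host")  # created, but never connected
with mock.patch("grpc.insecure_channel", return_value=channel) as factory:
    assert grpc.insecure_channel("ignored:1234") is channel
    factory.assert_called_once_with("ignored:1234")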