commit 65b5485
Author: daniel-sanche
Date:   Dec 14, 2023
Parent: a46183e

    collect backoff details from other rpcs

Showing 2 changed files with 8 additions and 6 deletions.
google/cloud/bigtable/data/_async/_mutate_rows.py (3 additions & 1 deletion)

@@ -25,6 +25,7 @@
 from google.cloud.bigtable.data._helpers import _make_metadata
 from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
 from google.cloud.bigtable.data._helpers import _retry_exception_factory
+from google.cloud.bigtable.data._helpers import backoff_generator
 
 # mutate_rows requests are limited to this number of mutations
 from google.cloud.bigtable.data.mutations import _MUTATE_ROWS_REQUEST_MUTATION_LIMIT
@@ -104,7 +105,7 @@ def __init__(
             # Entry level errors
             bt_exceptions._MutateRowsIncomplete,
         )
-        sleep_generator = retries.exponential_sleep_generator(0.01, 2, 60)
+        sleep_generator = backoff_generator(0.01, 2, 60)
         self._operation = retries.retry_target_async(
             self._run_attempt,
             self.is_retryable,
@@ -120,6 +121,7 @@ def __init__(
         self.remaining_indices = list(range(len(self.mutations)))
         self.errors: dict[int, list[Exception]] = {}
         # set up metrics
+        metrics.backoff_generator = sleep_generator
         self._operation_metrics = metrics
 
     async def start(self):
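
Note: the `backoff_generator` helper itself lives in `google/cloud/bigtable/data/_helpers` and its body is not part of this diff. A minimal sketch of what such a helper could look like, assuming it simply wraps api_core's `exponential_sleep_generator` while remembering each yielded delay; the class name and `history` attribute below are hypothetical, not the actual implementation:

```python
# Sketch only, not the real helper: a backoff generator that yields the same
# delays as api_core's exponential_sleep_generator, but records each one so a
# metrics object holding a reference to it (as in
# `metrics.backoff_generator = sleep_generator` above) can attribute a
# concrete backoff time to every retry attempt.
from google.api_core.retry import exponential_sleep_generator


class RecordingBackoffGenerator:
    def __init__(self, *args):
        # Arguments are forwarded unchanged, matching the call sites in this
        # commit: backoff_generator(0.01, 2, 60).
        self._inner = exponential_sleep_generator(*args)
        self.history: list[float] = []  # delays yielded so far, in order

    def __iter__(self):
        return self

    def __next__(self) -> float:
        delay = next(self._inner)
        self.history.append(delay)
        return delay
```

Since `retries.retry_target_async` draws one value from the sleep generator before each retry, `history[i]` would then hold the backoff that preceded retry attempt `i`.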
google/cloud/bigtable/data/_async/client.py (5 additions & 5 deletions)

@@ -70,6 +70,7 @@
 from google.cloud.bigtable.data._helpers import _get_retryable_errors
 from google.cloud.bigtable.data._helpers import _get_timeouts
 from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
+from google.cloud.bigtable.data._helpers import backoff_generator
 from google.cloud.bigtable.data._async.mutations_batcher import MutationsBatcherAsync
 from google.cloud.bigtable.data._async.mutations_batcher import _MB_SIZE
 from google.cloud.bigtable.data.read_modify_write_rules import ReadModifyWriteRule
@@ -905,14 +906,14 @@ async def sample_row_keys(
         retryable_excs = _get_retryable_errors(retryable_errors, self)
         predicate = retries.if_exception_type(*retryable_excs)
 
-        sleep_generator = retries.exponential_sleep_generator(0.01, 2, 60)
+        sleep_generator = backoff_generator(0.01, 2, 60)
 
         # prepare request
         metadata = _make_metadata(self.table_name, self.app_profile_id)
 
         # wrap rpc in retry and metric collection logic
         async with self._metrics.create_operation(
-            OperationType.SAMPLE_ROW_KEYS
+            OperationType.SAMPLE_ROW_KEYS, backoff_generator=sleep_generator
         ) as operation:
 
             async def execute_rpc():
@@ -1050,12 +1051,11 @@ async def mutate_row(
             # mutations should not be retried
             predicate = retries.if_exception_type()
 
-        sleep_generator = retries.exponential_sleep_generator(0.01, 2, 60)
-
+        sleep_generator = backoff_generator(0.01, 2, 60)
 
         # wrap rpc in retry and metric collection logic
         async with self._metrics.create_operation(
-            OperationType.MUTATE_ROW
+            OperationType.MUTATE_ROW, backoff_generator=sleep_generator
         ) as operation:
             metric_wrapped = operation.wrap_attempt_fn(
                 self.client._gapic_client.mutate_row
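
For the client RPCs the generator is handed to the metrics context manager up front rather than attached after construction. A hedged, self-contained illustration of that call pattern follows; the `create_operation(..., backoff_generator=...)` keyword comes from the diff above, while the wrapper function and the `metrics` parameter are assumptions for the sketch, not the library's public API:

```python
# Hypothetical illustration of the pattern this commit standardizes on for
# client RPCs: one shared sleep generator feeds both the retry loop and the
# metrics machinery, so recorded backoff times match what was actually slept.
from google.api_core import retry_async
from google.cloud.bigtable.data._helpers import backoff_generator


async def rpc_with_backoff_metrics(metrics, operation_type, rpc, predicate, timeout):
    sleep_generator = backoff_generator(0.01, 2, 60)
    async with metrics.create_operation(
        operation_type, backoff_generator=sleep_generator
    ) as operation:
        # retry_target pulls the next delay from sleep_generator before every
        # retry of `rpc`; the metrics operation reads the same object later.
        return await retry_async.retry_target(rpc, predicate, sleep_generator, timeout)
```

In `mutate_row` the predicate `retries.if_exception_type()` takes no arguments and so matches no exceptions, meaning the RPC is never retried; routing it through the same scaffolding anyway keeps the metrics interface uniform across RPCs.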
