Commit

allow more timed out requests in test
daniel-sanche committed Dec 5, 2024
1 parent b3b63c4 commit 2f40ea2
Showing 2 changed files with 10 additions and 4 deletions.
8 changes: 6 additions & 2 deletions tests/unit/data/_async/test_client.py
@@ -2005,6 +2005,7 @@ async def test_read_rows_sharded_negative_batch_timeout(self):
         They should raise DeadlineExceeded errors
         """
         from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
+        from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT
         from google.api_core.exceptions import DeadlineExceeded

         async def mock_call(*args, **kwargs):
@@ -2015,11 +2016,14 @@ async def mock_call(*args, **kwargs):
         async with client.get_table("instance", "table") as table:
             with mock.patch.object(table, "read_rows") as read_rows:
                 read_rows.side_effect = mock_call
-                queries = [ReadRowsQuery() for _ in range(15)]
+                num_calls = 15
+                queries = [ReadRowsQuery() for _ in range(num_calls)]
                 with pytest.raises(ShardedReadRowsExceptionGroup) as exc:
                     await table.read_rows_sharded(queries, operation_timeout=0.01)
                 assert isinstance(exc.value, ShardedReadRowsExceptionGroup)
-                assert len(exc.value.exceptions) == 5
+                # _CONCURRENCY_LIMIT calls will run, and won't be interrupted
+                # calls after the limit will be cancelled due to timeout
+                assert len(exc.value.exceptions) >= num_calls - _CONCURRENCY_LIMIT
                 assert all(
                     isinstance(e.__cause__, DeadlineExceeded)
                     for e in exc.value.exceptions
6 changes: 4 additions & 2 deletions tests/unit/data/_sync_autogen/test_client.py
@@ -1665,6 +1665,7 @@ def test_read_rows_sharded_negative_batch_timeout(self):
         They should raise DeadlineExceeded errors"""
         from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup
+        from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT
         from google.api_core.exceptions import DeadlineExceeded

         def mock_call(*args, **kwargs):
@@ -1675,11 +1676,12 @@ def mock_call(*args, **kwargs):
         with client.get_table("instance", "table") as table:
             with mock.patch.object(table, "read_rows") as read_rows:
                 read_rows.side_effect = mock_call
-                queries = [ReadRowsQuery() for _ in range(15)]
+                num_calls = 15
+                queries = [ReadRowsQuery() for _ in range(num_calls)]
                 with pytest.raises(ShardedReadRowsExceptionGroup) as exc:
                     table.read_rows_sharded(queries, operation_timeout=0.01)
                 assert isinstance(exc.value, ShardedReadRowsExceptionGroup)
-                assert len(exc.value.exceptions) == 5
+                assert len(exc.value.exceptions) >= num_calls - _CONCURRENCY_LIMIT
                 assert all(
                     (
                         isinstance(e.__cause__, DeadlineExceeded)
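
Why the relaxed assertion holds, per the comment added in the async test: only _CONCURRENCY_LIMIT shard requests run at a time, and with an operation timeout far shorter than any single call, every request that has not finished when the deadline fires surfaces as a DeadlineExceeded error, so at least num_calls - _CONCURRENCY_LIMIT failures are guaranteed. The sketch below is a minimal illustration of that counting argument using a plain asyncio.Semaphore with an assumed limit of 10; CONCURRENCY_LIMIT, slow_call, and sharded_read are hypothetical names and not part of the Bigtable client.

import asyncio

CONCURRENCY_LIMIT = 10  # assumed value; the real constant is google.cloud.bigtable.data._helpers._CONCURRENCY_LIMIT


async def slow_call(semaphore: asyncio.Semaphore) -> None:
    # Each shard request waits for a concurrency slot, then takes longer than the deadline.
    async with semaphore:
        await asyncio.sleep(1)


async def sharded_read(num_calls: int, operation_timeout: float) -> int:
    # Returns how many requests were still unfinished when the deadline fired.
    semaphore = asyncio.Semaphore(CONCURRENCY_LIMIT)
    tasks = [asyncio.create_task(slow_call(semaphore)) for _ in range(num_calls)]
    done, pending = await asyncio.wait(tasks, timeout=operation_timeout)
    for task in pending:
        task.cancel()  # calls past the deadline are cancelled, mirroring the test's expectation
    await asyncio.gather(*pending, return_exceptions=True)
    return len(pending)


async def main() -> None:
    timed_out = await sharded_read(num_calls=15, operation_timeout=0.01)
    # At most CONCURRENCY_LIMIT calls could have been running at the deadline,
    # so at least num_calls - CONCURRENCY_LIMIT of them must have timed out.
    assert timed_out >= 15 - CONCURRENCY_LIMIT


asyncio.run(main())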
