
Commit

Merge branch 'main' into parthea-patch-2
daniel-sanche authored Dec 19, 2024
2 parents ad874ea + 9c6f79d commit 9a16d55
Showing 91 changed files with 12,272 additions and 3,391 deletions.
4 changes: 3 additions & 1 deletion .cross_sync/README.md
@@ -62,10 +62,12 @@ CrossSync provides a set of annotations to mark up async classes, to guide the generation of a sync version

### Code Generation

Generation can be initiated using `python .cross_sync/generate.py .`
Generation can be initiated using `nox -s generate_sync`
from the root of the project. This will find all classes with the `__CROSS_SYNC_OUTPUT__ = "path/to/output"`
annotation, and generate a sync version of classes marked with `@CrossSync.convert_sync` at the output path.

There is a unit test at `tests/unit/data/test_sync_up_to_date.py` that verifies that the generated code is up to date.
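
As an illustration, here is a minimal sketch of how a class might be marked up for generation, using the `__CROSS_SYNC_OUTPUT__` attribute and the decorator named above; the decorator's exact signature and usage are assumptions, not taken from this commit:

```python
# Hypothetical sketch based on the README description above; the attribute and
# decorator names come from that text, and their exact behavior is assumed.
from google.cloud.bigtable.data._cross_sync import CrossSync

# Tells the generator where to write the generated sync module.
__CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen.example"


@CrossSync.convert_sync
class ExampleReaderAsync:
    async def read_all(self, stream):
        # In the generated sync variant, `async for` becomes a plain `for` loop.
        return [item async for item in stream]
```

Running `nox -s generate_sync` would then emit the sync counterpart at the declared output path, and the unit test above fails if that generated file drifts out of date.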

## Architecture

CrossSync is made up of two parts:
3 changes: 2 additions & 1 deletion .github/.OwlBot.lock.yaml
@@ -13,4 +13,5 @@
# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562
digest: sha256:8e3e7e18255c22d1489258d0374c901c01f9c4fd77a12088670cd73d580aa737
# created: 2024-12-17T00:59:58.625514486Z
14 changes: 12 additions & 2 deletions .github/workflows/conformance.yaml
@@ -26,9 +26,17 @@ jobs:
matrix:
test-version: [ "v0.0.2" ]
py-version: [ 3.8 ]
client-type: [ "Async v3", "Legacy" ]
client-type: [ "async", "sync", "legacy" ]
include:
- client-type: "sync"
# sync client does not support concurrent streams
test_args: "-skip _Generic_MultiStream"
- client-type: "legacy"
# legacy client is synchronous and does not support concurrent streams
# legacy client does not expose mutate_row. Disable those tests
test_args: "-skip _Generic_MultiStream -skip TestMutateRow_"
fail-fast: false
name: "${{ matrix.client-type }} Client / Python ${{ matrix.py-version }} / Test Tag ${{ matrix.test-version }}"
name: "${{ matrix.client-type }} client / python ${{ matrix.py-version }} / test tag ${{ matrix.test-version }}"
steps:
- uses: actions/checkout@v4
name: "Checkout python-bigtable"
@@ -53,4 +61,6 @@ jobs:
env:
CLIENT_TYPE: ${{ matrix.client-type }}
PYTHONUNBUFFERED: 1
TEST_ARGS: ${{ matrix.test_args }}
PROXY_PORT: 9999

13 changes: 2 additions & 11 deletions .kokoro/conformance.sh
@@ -19,19 +19,9 @@ set -eo pipefail
## cd to the parent directory, i.e. the root of the git repo
cd $(dirname $0)/..

PROXY_ARGS=""
TEST_ARGS=""
if [[ "${CLIENT_TYPE^^}" == "LEGACY" ]]; then
echo "Using legacy client"
PROXY_ARGS="--legacy-client"
# legacy client does not expose mutate_row. Disable those tests
TEST_ARGS="-skip TestMutateRow_"
fi

# Build and start the proxy in a separate process
PROXY_PORT=9999
pushd test_proxy
nohup python test_proxy.py --port $PROXY_PORT $PROXY_ARGS &
nohup python test_proxy.py --port $PROXY_PORT --client_type=$CLIENT_TYPE &
proxyPID=$!
popd

@@ -43,6 +33,7 @@ function cleanup() {
trap cleanup EXIT

# Run the conformance test
echo "running tests with args: $TEST_ARGS"
pushd cloud-bigtable-clients-test/tests
eval "go test -v -proxy_addr=:$PROXY_PORT $TEST_ARGS"
RETURN_CODE=$?
52 changes: 41 additions & 11 deletions .kokoro/docker/docs/requirements.txt
@@ -2,11 +2,11 @@
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile --allow-unsafe --generate-hashes requirements.in
# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in
#
argcomplete==3.5.1 \
--hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \
--hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4
argcomplete==3.5.2 \
--hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \
--hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb
# via nox
colorlog==6.9.0 \
--hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \
@@ -23,7 +23,7 @@ filelock==3.16.1 \
nox==2024.10.9 \
--hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \
--hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95
# via -r requirements.in
# via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in
packaging==24.2 \
--hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \
--hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f
@@ -32,11 +32,41 @@ platformdirs==4.3.6 \
--hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \
--hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb
# via virtualenv
tomli==2.0.2 \
--hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \
--hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed
tomli==2.2.1 \
--hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \
--hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \
--hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \
--hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \
--hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \
--hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \
--hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \
--hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \
--hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \
--hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \
--hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \
--hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \
--hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \
--hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \
--hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \
--hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \
--hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \
--hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \
--hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \
--hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \
--hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \
--hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \
--hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \
--hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \
--hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \
--hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \
--hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \
--hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \
--hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \
--hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \
--hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \
--hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7
# via nox
virtualenv==20.27.1 \
--hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \
--hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4
virtualenv==20.28.0 \
--hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \
--hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa
# via nox
18 changes: 0 additions & 18 deletions docs/async_data_client/async_data_usage.rst

This file was deleted.

@@ -7,6 +7,6 @@ Bigtable Data Client Async
performance benefits, the codebase should be designed to be async from the ground up.


.. autoclass:: google.cloud.bigtable.data._async.client.BigtableDataClientAsync
.. autoclass:: google.cloud.bigtable.data.BigtableDataClientAsync
:members:
:show-inheritance:
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
39 changes: 39 additions & 0 deletions docs/data_client/data_client_usage.rst
@@ -0,0 +1,39 @@
Data Client
===========

Sync Surface
------------

.. toctree::
:maxdepth: 3

sync_data_client
sync_data_table
sync_data_mutations_batcher
sync_data_execute_query_iterator

Async Surface
-------------

.. toctree::
:maxdepth: 3

async_data_client
async_data_table
async_data_mutations_batcher
async_data_execute_query_iterator

Common Classes
--------------

.. toctree::
:maxdepth: 3

common_data_read_rows_query
common_data_row
common_data_row_filters
common_data_mutations
common_data_read_modify_write_rules
common_data_exceptions
common_data_execute_query_values
common_data_execute_query_metadata
6 changes: 6 additions & 0 deletions docs/data_client/sync_data_client.rst
@@ -0,0 +1,6 @@
Bigtable Data Client
~~~~~~~~~~~~~~~~~~~~

.. autoclass:: google.cloud.bigtable.data.BigtableDataClient
:members:
:show-inheritance:
6 changes: 6 additions & 0 deletions docs/data_client/sync_data_execute_query_iterator.rst
@@ -0,0 +1,6 @@
Execute Query Iterator
~~~~~~~~~~~~~~~~~~~~~~

.. autoclass:: google.cloud.bigtable.data.execute_query.ExecuteQueryIterator
:members:
:show-inheritance:
6 changes: 6 additions & 0 deletions docs/data_client/sync_data_mutations_batcher.rst
@@ -0,0 +1,6 @@
Mutations Batcher
~~~~~~~~~~~~~~~~~

.. automodule:: google.cloud.bigtable.data._sync_autogen.mutations_batcher
:members:
:show-inheritance:
6 changes: 6 additions & 0 deletions docs/data_client/sync_data_table.rst
@@ -0,0 +1,6 @@
Table
~~~~~

.. autoclass:: google.cloud.bigtable.data.Table
:members:
:show-inheritance:
4 changes: 2 additions & 2 deletions docs/index.rst
@@ -5,10 +5,10 @@
Client Types
-------------
.. toctree::
:maxdepth: 2
:maxdepth: 3

data_client/data_client_usage
classic_client/usage
async_data_client/async_data_usage


Changelog
9 changes: 4 additions & 5 deletions docs/scripts/patch_devsite_toc.py
@@ -117,7 +117,8 @@ def __init__(self, dir_name, index_file_name):
continue
# bail when toc indented block is done
if not line.startswith(" ") and not line.startswith("\t"):
break
in_toc = False
continue
# extract entries
self.items.append(self.extract_toc_entry(line.strip()))

@@ -194,9 +195,7 @@ def validate_toc(toc_file_path, expected_section_list, added_sections):
# Add sections for the async_data_client and classic_client directories
toc_path = "_build/html/docfx_yaml/toc.yml"
custom_sections = [
TocSection(
dir_name="async_data_client", index_file_name="async_data_usage.rst"
),
TocSection(dir_name="data_client", index_file_name="data_client_usage.rst"),
TocSection(dir_name="classic_client", index_file_name="usage.rst"),
]
add_sections(toc_path, custom_sections)
@@ -210,7 +209,7 @@ def validate_toc(toc_file_path, expected_section_list, added_sections):
"bigtable APIs",
"Changelog",
"Multiprocessing",
"Async Data Client",
"Data Client",
"Classic Client",
],
added_sections=custom_sections,
32 changes: 30 additions & 2 deletions google/cloud/bigtable/data/__init__.py
@@ -17,8 +17,10 @@

from google.cloud.bigtable.data._async.client import BigtableDataClientAsync
from google.cloud.bigtable.data._async.client import TableAsync

from google.cloud.bigtable.data._async.mutations_batcher import MutationsBatcherAsync
from google.cloud.bigtable.data._sync_autogen.client import BigtableDataClient
from google.cloud.bigtable.data._sync_autogen.client import Table
from google.cloud.bigtable.data._sync_autogen.mutations_batcher import MutationsBatcher

from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
from google.cloud.bigtable.data.read_rows_query import RowRange
@@ -45,16 +47,42 @@
from google.cloud.bigtable.data._helpers import RowKeySamples
from google.cloud.bigtable.data._helpers import ShardedQuery

# setup custom CrossSync mappings for library
from google.cloud.bigtable_v2.services.bigtable.async_client import (
BigtableAsyncClient,
)
from google.cloud.bigtable.data._async._read_rows import _ReadRowsOperationAsync
from google.cloud.bigtable.data._async._mutate_rows import _MutateRowsOperationAsync

from google.cloud.bigtable_v2.services.bigtable.client import (
BigtableClient,
)
from google.cloud.bigtable.data._sync_autogen._read_rows import _ReadRowsOperation
from google.cloud.bigtable.data._sync_autogen._mutate_rows import _MutateRowsOperation

from google.cloud.bigtable.data._cross_sync import CrossSync

CrossSync.add_mapping("GapicClient", BigtableAsyncClient)
CrossSync._Sync_Impl.add_mapping("GapicClient", BigtableClient)
CrossSync.add_mapping("_ReadRowsOperation", _ReadRowsOperationAsync)
CrossSync._Sync_Impl.add_mapping("_ReadRowsOperation", _ReadRowsOperation)
CrossSync.add_mapping("_MutateRowsOperation", _MutateRowsOperationAsync)
CrossSync._Sync_Impl.add_mapping("_MutateRowsOperation", _MutateRowsOperation)
CrossSync.add_mapping("MutationsBatcher", MutationsBatcherAsync)
CrossSync._Sync_Impl.add_mapping("MutationsBatcher", MutationsBatcher)

__version__: str = package_version.__version__

__all__ = (
"BigtableDataClientAsync",
"TableAsync",
"MutationsBatcherAsync",
"BigtableDataClient",
"Table",
"MutationsBatcher",
"RowKeySamples",
"ReadRowsQuery",
"RowRange",
"MutationsBatcherAsync",
"Mutation",
"RowMutationEntry",
"SetCell",
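
The headline change in `google/cloud/bigtable/data/__init__.py` is that the package now exports a synchronous surface (`BigtableDataClient`, `Table`, `MutationsBatcher`) alongside the existing async one. Below is a rough usage sketch of that sync surface; the project, instance, and table IDs are placeholders, and the method signatures are assumptions based on the async surface this commit mirrors:

```python
# Hypothetical usage of the newly exported sync client; identifiers are
# placeholders and the exact call signatures are assumed, not verified
# against this commit.
from google.cloud.bigtable.data import BigtableDataClient, ReadRowsQuery

client = BigtableDataClient(project="my-project")
table = client.get_table("my-instance", "my-table")

# Read a handful of rows synchronously, with no event loop required.
for row in table.read_rows(ReadRowsQuery(limit=5)):
    print(row.row_key)

client.close()
```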