From bbba50d220b76b2a36d364697d26936df461dc8d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Mar 2024 15:57:51 -0800 Subject: [PATCH 01/24] added new file for data client samples --- .../data_client/data_client_tests_async.py | 26 ++ samples/snippets/data_client/noxfile.py | 292 ++++++++++++++++++ .../data_client/requirements-test.txt | 1 + samples/snippets/data_client/requirements.txt | 1 + 4 files changed, 320 insertions(+) create mode 100644 samples/snippets/data_client/data_client_tests_async.py create mode 100644 samples/snippets/data_client/noxfile.py create mode 100644 samples/snippets/data_client/requirements-test.txt create mode 100644 samples/snippets/data_client/requirements.txt diff --git a/samples/snippets/data_client/data_client_tests_async.py b/samples/snippets/data_client/data_client_tests_async.py new file mode 100644 index 000000000..1d0fb7ba8 --- /dev/null +++ b/samples/snippets/data_client/data_client_tests_async.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python + +# Copyright 2023, Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +async def set_cell(): + # [START bigtable_data_set_cell] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data import SetCell + async with BigtableDataClientAsync(project="my-project") as client: + async with client.get_table(instance_id='my-instance', table_id='my-table') as table: + row_key = b"my_row" + mutation = SetCell(family="family", qualifier="qualifier", new_value="value") + await table.mutate_row(row_key, mutation) + # [END bigtable_data_set_cell] diff --git a/samples/snippets/data_client/noxfile.py b/samples/snippets/data_client/noxfile.py new file mode 100644 index 000000000..483b55901 --- /dev/null +++ b/samples/snippets/data_client/noxfile.py @@ -0,0 +1,292 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import glob +import os +from pathlib import Path +import sys +from typing import Callable, Dict, Optional + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" + +# Copy `noxfile_config.py` to your directory and modify it instead. + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. 
The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8") + else: + session.install("flake8", "flake8-annotations") + + args = FLAKE8_COMMON_ARGS + [ + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# format = isort + black +# + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + # check for presence of tests + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list.extend(glob.glob("**/tests", recursive=True)) + + if len(test_list) == 0: + print("No tests found, skipping directory.") + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/samples/snippets/data_client/requirements-test.txt b/samples/snippets/data_client/requirements-test.txt new file mode 100644 index 000000000..8075a1ec5 --- /dev/null +++ b/samples/snippets/data_client/requirements-test.txt @@ -0,0 +1 @@ +pytest==8.0.0 diff --git a/samples/snippets/data_client/requirements.txt b/samples/snippets/data_client/requirements.txt new file mode 100644 index 000000000..835e1bc78 --- /dev/null +++ b/samples/snippets/data_client/requirements.txt @@ -0,0 +1 @@ +google-cloud-bigtable==2.23.0 From e102ef5ec28e3a029b1d2b1e31e804f67d1e3f0d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Mar 2024 16:57:50 -0800 Subject: [PATCH 02/24] improved test infra --- ...async.py => data_client_snippets_async.py} | 32 ++++++++--- .../data_client_snippets_async_test.py | 57 +++++++++++++++++++ .../data_client/requirements-test.txt | 1 + 3 files changed, 83 insertions(+), 7 deletions(-) rename samples/snippets/data_client/{data_client_tests_async.py => data_client_snippets_async.py} (50%) create mode 100644 samples/snippets/data_client/data_client_snippets_async_test.py diff --git a/samples/snippets/data_client/data_client_tests_async.py b/samples/snippets/data_client/data_client_snippets_async.py similarity index 50% rename from samples/snippets/data_client/data_client_tests_async.py rename to samples/snippets/data_client/data_client_snippets_async.py index 1d0fb7ba8..4bc504045 100644 --- a/samples/snippets/data_client/data_client_tests_async.py +++ b/samples/snippets/data_client/data_client_snippets_async.py @@ -14,13 +14,31 @@ # limitations under the License. 
-async def set_cell(): - # [START bigtable_data_set_cell] +async def create_table(project, instance, table): + # [START bigtable_data_create_table] from google.cloud.bigtable.data import BigtableDataClientAsync + + project_id = 'my_project' + instance_id = 'my-instance' + table_id = 'my-table' + # [END bigtable_data_create_table] + # replace placeholders outside sample + project_id, instance_id, table_id = project, instance, table + # [START bigtable_data_create_table] + + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + print(f"table: {table}") + # [END bigtable_data_create_table] + + +async def set_cell(table): + # [START bigtable_data_set_cell] from google.cloud.bigtable.data import SetCell - async with BigtableDataClientAsync(project="my-project") as client: - async with client.get_table(instance_id='my-instance', table_id='my-table') as table: - row_key = b"my_row" - mutation = SetCell(family="family", qualifier="qualifier", new_value="value") - await table.mutate_row(row_key, mutation) + + row_key = b"my_row" + mutation = SetCell( + family="family", qualifier="qualifier", new_value="value" + ) + await table.mutate_row(row_key, mutation) # [END bigtable_data_set_cell] diff --git a/samples/snippets/data_client/data_client_snippets_async_test.py b/samples/snippets/data_client/data_client_snippets_async_test.py new file mode 100644 index 000000000..ba678ccb9 --- /dev/null +++ b/samples/snippets/data_client/data_client_snippets_async_test.py @@ -0,0 +1,57 @@ +# Copyright 2023, Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
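+
+# NOTE: these tests run against live Cloud Bigtable resources, configured
+# through environment variables. A sketch of the expected setup, using
+# placeholder values:
+#
+#   export GOOGLE_CLOUD_PROJECT=my-project   # required
+#   export BIGTABLE_INSTANCE=my-instance     # required
+#   export BIGTABLE_TABLE=my-table           # optional; a temporary table is created when unset
+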
+import pytest
+import pytest_asyncio
+import uuid
+import os
+import asyncio
+
+import data_client_snippets_async as data_snippets
+
+
+PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
+BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"]
+TABLE_ID_STATIC = os.getenv('BIGTABLE_TABLE', None)  # if not set, a temporary table will be generated
+
+
+@pytest.fixture(scope='session')
+def table_id():
+    from google.cloud import bigtable
+
+    client = bigtable.Client(project=PROJECT, admin=True)
+    instance = client.instance(BIGTABLE_INSTANCE)
+    table_id = TABLE_ID_STATIC or f"data-client-{str(uuid.uuid4())[:16]}"
+
+    admin_table = instance.table(table_id)
+    if not admin_table.exists():
+        admin_table.create(column_families={"family": None})
+
+    yield table_id
+
+    if not table_id == TABLE_ID_STATIC:
+        # clean up table when finished
+        admin_table.delete()
+
+
+@pytest_asyncio.fixture
+async def table(table_id):
+    from google.cloud.bigtable.data import BigtableDataClientAsync
+    async with BigtableDataClientAsync(project=PROJECT) as client:
+        async with client.get_table(BIGTABLE_INSTANCE, table_id) as table:
+            yield table
+
+
+@pytest.mark.asyncio
+async def test_set_cell(table):
+    from google.cloud.bigtable.data import BigtableDataClientAsync
+    await data_snippets.set_cell(table)
diff --git a/samples/snippets/data_client/requirements-test.txt b/samples/snippets/data_client/requirements-test.txt
index 8075a1ec5..bbeed6cc5 100644
--- a/samples/snippets/data_client/requirements-test.txt
+++ b/samples/snippets/data_client/requirements-test.txt
@@ -1 +1,2 @@
 pytest==8.0.0
+pytest-asyncio

From 637f90584f61663c82bb2b27c5839cdc3b428140 Mon Sep 17 00:00:00 2001
From: Daniel Sanche
Date: Tue, 5 Mar 2024 17:33:56 -0800
Subject: [PATCH 03/24] added samples

---
 .../data_client/data_client_snippets_async.py | 159 ++++++++++++++++++
 1 file changed, 159 insertions(+)

diff --git a/samples/snippets/data_client/data_client_snippets_async.py b/samples/snippets/data_client/data_client_snippets_async.py
index 4bc504045..a6b6cad1f 100644
--- a/samples/snippets/data_client/data_client_snippets_async.py
+++ b/samples/snippets/data_client/data_client_snippets_async.py
@@ -42,3 +42,162 @@ async def set_cell(table):
     )
     await table.mutate_row(row_key, mutation)
     # [END bigtable_data_set_cell]
+
+
+async def bulk_mutate(table):
+    # [START bigtable_data_bulk_mutate]
+    from google.cloud.bigtable.data.mutations import SetCell
+    from google.cloud.bigtable.data.mutations import RowMutationEntry
+
+    row_key_1 = b"first_row"
+    row_key_2 = b"second_row"
+
+    common_mutation = SetCell(
+        family="family", qualifier="qualifier", new_value="value"
+    )
+    await table.bulk_mutate([
+        RowMutationEntry(row_key_1, [common_mutation]),
+        RowMutationEntry(row_key_2, [common_mutation]),
+    ])
+    # [END bigtable_data_bulk_mutate]
+
+
+async def mutations_batcher(table):
+    # [START bigtable_data_mutations_batcher]
+    from google.cloud.bigtable.data.mutations import SetCell
+    from google.cloud.bigtable.data import RowMutationEntry
+
+    common_mutation = SetCell(
+        family="family", qualifier="qualifier", new_value="value"
+    )
+
+    async with table.mutations_batcher(
+        flush_count=2, max_row_bytes=1024
+    ) as batcher:
+        for i in range(10):
+            row_key = f"row-{i}"
+            batcher.append(RowMutationEntry(row_key, [common_mutation]))
+    # [END bigtable_data_mutations_batcher]
+
+async def read_row(table):
+    # [START bigtable_data_read_row]
+    row = await table.read_row(b"my_row")
+    print(row.row_key)
+    # [END bigtable_data_read_row]
+
+async def read_rows_list(table):
+    # [START 
bigtable_data_read_rows_list] + from google.cloud.bigtable.data import ReadRowsQuery + from google.cloud.bigtable.data import RowRange + + query = ReadRowsQuery(row_ranges=[RowRange("start", "end")]) + + row_list = await table.read_rows(query) + for row in row_list: + print(row.row_key) + # [END bigtable_data_read_rows_list] + +async def read_rows_stream(table): + # [START bigtable_data_read_rows_stream] + from google.cloud.bigtable.data import ReadRowsQuery + from google.cloud.bigtable.data import RowRange + + query = ReadRowsQuery(row_ranges=[RowRange("start", "end")]) + for row in await table.read_rows_stream(query): + print(row.row_key) + # [END bigtable_data_read_rows_stream] + +async def read_rows_sharded(table): + # [START bigtable_data_read_rows_sharded] + from google.cloud.bigtable.data import ReadRowsQuery + from google.cloud.bigtable.data import RowRange + + # find shard keys for table + table_shard_keys = await table.sample_row_keys() + # construct shared query + query = ReadRowsQuery(row_ranges=[RowRange("start", "end")]) + shard_queries = query.shard(table_shard_keys) + # execute sharded query + row_list = await table.read_rows_sharded(shard_queries) + for row in row_list: + print(row.row_key) + # [END bigtable_data_read_rows_sharded] + +async def row_exists(table): + # [START bigtable_data_row_exists] + row_key = b"my_row" + exists = await table.row_exists(row_key) + if exists: + print(f"The row {row_key} exists") + else: + print(f"The row {row_key} does not exist") + # [END bigtable_data_row_exists] + +async def read_modify_write_increment(table): + # [START bigtable_data_read_modify_write_increment] + from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule + from google.cloud.bigtable.data import SetCell + + row_key = b"my_row" + family = "family" + qualifier = "qualifier" + + # initialize row with a starting value of 1 + await table.mutate_row(row_key, SetCell(family, qualifier, new_value=1)) + + # use read_modify_write to increment the value by 2 + add_two_rule = IncrementRule(family, qualifier, increment=2) + result = await table.read_modify_write_row(row_key, add_two_rule) + + # check result + cell = result[0] + print(cell.value) + assert cell.value == 3 + # [END bigtable_data_read_modify_write_increment] + +async def read_modify_write_append(table): + # [START bigtable_data_read_modify_write_append] + from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule + from google.cloud.bigtable.data import SetCell + + row_key = b"my_row" + family = "family" + qualifier = "qualifier" + + # initialize row with a starting value of "hello" + await table.mutate_row(row_key, SetCell(family, qualifier, new_value="hello")) + + # use read_modify_write to append " world" to the value + append_world_rule = AppendValueRule(family, qualifier, value=" world") + result = await table.read_modify_write_row(row_key, append_world_rule) + + # check result + cell = result[0] + print(cell.value) + assert cell.value == "hello world" + # [END bigtable_data_read_modify_append] + +async def check_and_mutate(table): + # [START bigtable_data_check_and_mutate] + from google.cloud.bigtable.data.row_filters import ValueRangeFilter + from google.cloud.bigtable.data import SetCell + + row_key = b"my_row" + family = "family" + qualifier = "qualifier" + + # create a predicate filter to test against + # in this case, use a ValueRangeFilter to check if the value is positive or negative + predicate = ValueRangeFilter(start_value=0, inclusive_start=True) + # use check and 
mutate to change the value in the row based on the predicate + was_true = await table.check_and_mutate_row( + row_key, + predicate, + true_case_mutations=SetCell(family, qualifier, new_value="positive"), + false_case_mutations=SetCell(family, qualifier, new_value="negative"), + ) + if was_true: + print("The value was positive") + else: + print("The value was negative") + # [END bigtable_data_check_and_mutate] From 399524bbc9d70676991f6a25a19df95916b421e5 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 7 Mar 2024 15:46:11 -0800 Subject: [PATCH 04/24] got tests working --- .../data_client/data_client_snippets_async.py | 65 +++++++++-------- .../data_client_snippets_async_test.py | 72 ++++++++++++++++++- samples/snippets/data_client/noxfile.py | 15 ++-- 3 files changed, 114 insertions(+), 38 deletions(-) diff --git a/samples/snippets/data_client/data_client_snippets_async.py b/samples/snippets/data_client/data_client_snippets_async.py index a6b6cad1f..ccfb4bd66 100644 --- a/samples/snippets/data_client/data_client_snippets_async.py +++ b/samples/snippets/data_client/data_client_snippets_async.py @@ -18,9 +18,9 @@ async def create_table(project, instance, table): # [START bigtable_data_create_table] from google.cloud.bigtable.data import BigtableDataClientAsync - project_id = 'my_project' - instance_id = 'my-instance' - table_id = 'my-table' + project_id = "my_project" + instance_id = "my-instance" + table_id = "my-table" # [END bigtable_data_create_table] # replace placeholders outside sample project_id, instance_id, table_id = project, instance, table @@ -29,7 +29,8 @@ async def create_table(project, instance, table): async with BigtableDataClientAsync(project=project_id) as client: async with client.get_table(instance_id, table_id) as table: print(f"table: {table}") - # [END bigtable_data_create_table] + # [END bigtable_data_create_table] + return table async def set_cell(table): @@ -37,9 +38,7 @@ async def set_cell(table): from google.cloud.bigtable.data import SetCell row_key = b"my_row" - mutation = SetCell( - family="family", qualifier="qualifier", new_value="value" - ) + mutation = SetCell(family="family", qualifier="qualifier", new_value="value") await table.mutate_row(row_key, mutation) # [END bigtable_data_set_cell] @@ -52,13 +51,13 @@ async def bulk_mutate(table): row_key_1 = b"first_row" row_key_2 = b"second_row" - common_mutation = SetCell( - family="family", qualifier="qualifier", new_value="value" + common_mutation = SetCell(family="family", qualifier="qualifier", new_value="value") + await table.bulk_mutate_rows( + [ + RowMutationEntry(row_key_1, [common_mutation]), + RowMutationEntry(row_key_2, [common_mutation]), + ] ) - await table.bulk_mutate([ - RowMutationEntry(row_key_1, [common_mutation]), - RowMutationEntry(row_key_2, [common_mutation]), - ]) # [END bigtable_data_bulk_mutate] @@ -67,46 +66,51 @@ async def mutations_batcher(table): from google.cloud.bigtable.data.mutations import SetCell from google.cloud.bigtable.data import RowMutationEntry - common_mutation = SetCell( - family="family", qualifier="qualifier", new_value="value" - ) + common_mutation = SetCell(family="family", qualifier="qualifier", new_value="value") async with table.mutations_batcher( - flush_count=2, max_row_bytes=1024 + flush_limit_mutation_count=2, flush_limit_bytes=1024 ) as batcher: for i in range(10): row_key = f"row-{i}" batcher.append(RowMutationEntry(row_key, [common_mutation])) - # [END bigtable_data_mutations_batcher] + # [END bigtable_data_mutations_batcher] + return batcher + 
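+# Usage sketch for the batcher above (an illustrative comment; `table` and
+# `common_mutation` are the names used in this sample): appended mutations
+# are buffered and flushed in the background once flush_limit_mutation_count
+# or flush_limit_bytes is reached, and exiting the `async with` block flushes
+# anything still buffered, e.g.:
+#
+#   async with table.mutations_batcher() as batcher:
+#       batcher.append(RowMutationEntry(b"my_row", [common_mutation]))
+#   # leaving the block commits any remaining buffered mutations
+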
async def read_row(table): # [START bigtable_data_read_row] row = await table.read_row(b"my_row") print(row.row_key) # [END bigtable_data_read_row] + return row + async def read_rows_list(table): # [START bigtable_data_read_rows_list] from google.cloud.bigtable.data import ReadRowsQuery from google.cloud.bigtable.data import RowRange - query = ReadRowsQuery(row_ranges=[RowRange("start", "end")]) + query = ReadRowsQuery(row_ranges=[RowRange("a", "z")]) row_list = await table.read_rows(query) for row in row_list: print(row.row_key) # [END bigtable_data_read_rows_list] + return row_list + async def read_rows_stream(table): # [START bigtable_data_read_rows_stream] from google.cloud.bigtable.data import ReadRowsQuery from google.cloud.bigtable.data import RowRange - query = ReadRowsQuery(row_ranges=[RowRange("start", "end")]) - for row in await table.read_rows_stream(query): + query = ReadRowsQuery(row_ranges=[RowRange("a", "z")]) + async for row in await table.read_rows_stream(query): print(row.row_key) # [END bigtable_data_read_rows_stream] + async def read_rows_sharded(table): # [START bigtable_data_read_rows_sharded] from google.cloud.bigtable.data import ReadRowsQuery @@ -115,7 +119,7 @@ async def read_rows_sharded(table): # find shard keys for table table_shard_keys = await table.sample_row_keys() # construct shared query - query = ReadRowsQuery(row_ranges=[RowRange("start", "end")]) + query = ReadRowsQuery(row_ranges=[RowRange("a", "z")]) shard_queries = query.shard(table_shard_keys) # execute sharded query row_list = await table.read_rows_sharded(shard_queries) @@ -123,6 +127,7 @@ async def read_rows_sharded(table): print(row.row_key) # [END bigtable_data_read_rows_sharded] + async def row_exists(table): # [START bigtable_data_row_exists] row_key = b"my_row" @@ -132,6 +137,8 @@ async def row_exists(table): else: print(f"The row {row_key} does not exist") # [END bigtable_data_row_exists] + return exists + async def read_modify_write_increment(table): # [START bigtable_data_read_modify_write_increment] @@ -146,15 +153,16 @@ async def read_modify_write_increment(table): await table.mutate_row(row_key, SetCell(family, qualifier, new_value=1)) # use read_modify_write to increment the value by 2 - add_two_rule = IncrementRule(family, qualifier, increment=2) + add_two_rule = IncrementRule(family, qualifier, increment_amount=2) result = await table.read_modify_write_row(row_key, add_two_rule) # check result cell = result[0] - print(cell.value) - assert cell.value == 3 + print(f"{cell.row_key} value: {int(cell)}") + assert int(cell) == 3 # [END bigtable_data_read_modify_write_increment] + async def read_modify_write_append(table): # [START bigtable_data_read_modify_write_append] from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule @@ -168,15 +176,16 @@ async def read_modify_write_append(table): await table.mutate_row(row_key, SetCell(family, qualifier, new_value="hello")) # use read_modify_write to append " world" to the value - append_world_rule = AppendValueRule(family, qualifier, value=" world") + append_world_rule = AppendValueRule(family, qualifier, append_value=" world") result = await table.read_modify_write_row(row_key, append_world_rule) # check result cell = result[0] - print(cell.value) - assert cell.value == "hello world" + print(f"{cell.row_key} value: {cell.value}") + assert cell.value == b"hello world" # [END bigtable_data_read_modify_append] + async def check_and_mutate(table): # [START bigtable_data_check_and_mutate] from 
google.cloud.bigtable.data.row_filters import ValueRangeFilter
diff --git a/samples/snippets/data_client/data_client_snippets_async_test.py b/samples/snippets/data_client/data_client_snippets_async_test.py
index ba678ccb9..e85f13d47 100644
--- a/samples/snippets/data_client/data_client_snippets_async_test.py
+++ b/samples/snippets/data_client/data_client_snippets_async_test.py
@@ -21,10 +21,12 @@
 
 PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
 BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"]
-TABLE_ID_STATIC = os.getenv('BIGTABLE_TABLE', None)  # if not set, a temporary table will be generated
+TABLE_ID_STATIC = os.getenv(
+    "BIGTABLE_TABLE", None
+)  # if not set, a temporary table will be generated
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def table_id():
     from google.cloud import bigtable
 
@@ -46,12 +48,76 @@ def table_id():
 @pytest_asyncio.fixture
 async def table(table_id):
     from google.cloud.bigtable.data import BigtableDataClientAsync
+
     async with BigtableDataClientAsync(project=PROJECT) as client:
         async with client.get_table(BIGTABLE_INSTANCE, table_id) as table:
             yield table
 
 
+@pytest.mark.asyncio
+async def test_create_table(table_id):
+    from google.cloud.bigtable.data import TableAsync
+
+    result = await data_snippets.create_table(PROJECT, BIGTABLE_INSTANCE, table_id)
+    assert isinstance(result, TableAsync)
+    assert result.table_id == table_id
+    assert result.instance_id == BIGTABLE_INSTANCE
+    assert result.client.project == PROJECT
+
+
 @pytest.mark.asyncio
 async def test_set_cell(table):
-    from google.cloud.bigtable.data import BigtableDataClientAsync
     await data_snippets.set_cell(table)
+
+
+@pytest.mark.asyncio
+async def test_bulk_mutate(table):
+    await data_snippets.bulk_mutate(table)
+
+
+@pytest.mark.asyncio
+async def test_mutations_batcher(table):
+    from google.cloud.bigtable.data import MutationsBatcherAsync
+
+    batcher = await data_snippets.mutations_batcher(table)
+    assert isinstance(batcher, MutationsBatcherAsync)
+
+
+@pytest.mark.asyncio
+async def test_read_row(table):
+    await data_snippets.read_row(table)
+
+
+@pytest.mark.asyncio
+async def test_read_rows_list(table):
+    await data_snippets.read_rows_list(table)
+
+
+@pytest.mark.asyncio
+async def test_read_rows_stream(table):
+    await data_snippets.read_rows_stream(table)
+
+
+@pytest.mark.asyncio
+async def test_read_rows_sharded(table):
+    await data_snippets.read_rows_sharded(table)
+
+
+@pytest.mark.asyncio
+async def test_row_exists(table):
+    await data_snippets.row_exists(table)
+
+
+@pytest.mark.asyncio
+async def test_read_modify_write_increment(table):
+    await data_snippets.read_modify_write_increment(table)
+
+
+@pytest.mark.asyncio
+async def test_read_modify_write_append(table):
+    await data_snippets.read_modify_write_append(table)
+
+
+@pytest.mark.asyncio
+async def test_check_and_mutate(table):
+    await data_snippets.check_and_mutate(table)
diff --git a/samples/snippets/data_client/noxfile.py b/samples/snippets/data_client/noxfile.py
index 483b55901..3b7135946 100644
--- a/samples/snippets/data_client/noxfile.py
+++ b/samples/snippets/data_client/noxfile.py
@@ -160,6 +160,7 @@ def blacken(session: nox.sessions.Session) -> None:
 # format = isort + black
 #
 
+
 @nox.session
 def format(session: nox.sessions.Session) -> None:
     """
@@ -187,7 +188,9 @@ def _session_tests(
     session: nox.sessions.Session, post_install: Callable = None
 ) -> None:
     # check for presence of tests
-    test_list = 
glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: @@ -209,9 +212,7 @@ def _session_tests( if os.path.exists("requirements-test.txt"): if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") else: session.install("-r", "requirements-test.txt") with open("requirements-test.txt") as rtfile: @@ -224,9 +225,9 @@ def _session_tests( post_install(session) if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) + concurrent_args.extend(["-n", "auto"]) session.run( "pytest", @@ -256,7 +257,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): From 08c8433c0e97bba82d1c20fd96c3184391bf4e43 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 7 Mar 2024 16:18:24 -0800 Subject: [PATCH 05/24] updated write samples to match existing --- .../data_client/data_client_snippets_async.py | 200 ++++++++---------- .../data_client_snippets_async_test.py | 38 +--- 2 files changed, 105 insertions(+), 133 deletions(-) diff --git a/samples/snippets/data_client/data_client_snippets_async.py b/samples/snippets/data_client/data_client_snippets_async.py index ccfb4bd66..fe75fe6d0 100644 --- a/samples/snippets/data_client/data_client_snippets_async.py +++ b/samples/snippets/data_client/data_client_snippets_async.py @@ -14,52 +14,112 @@ # limitations under the License. 
-async def create_table(project, instance, table): - # [START bigtable_data_create_table] +async def write_simple(table): + # [START bigtable_async_write_simple] from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data import SetCell - project_id = "my_project" - instance_id = "my-instance" - table_id = "my-table" - # [END bigtable_data_create_table] - # replace placeholders outside sample - project_id, instance_id, table_id = project, instance, table - # [START bigtable_data_create_table] + async def write_simple(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + family_id = "stats_summary" + row_key = b"phone#4c410523#20190501" - async with BigtableDataClientAsync(project=project_id) as client: - async with client.get_table(instance_id, table_id) as table: - print(f"table: {table}") - # [END bigtable_data_create_table] - return table + cell_mutation = SetCell(family_id, "connected_cell", 1) + wifi_mutation = SetCell(family_id, "connected_wifi", 1) + os_mutation = SetCell(family_id, "os_build", "PQ2A.190405.003") + await table.mutate_row(row_key, cell_mutation) + await table.mutate_row(row_key, wifi_mutation) + await table.mutate_row(row_key, os_mutation) -async def set_cell(table): - # [START bigtable_data_set_cell] - from google.cloud.bigtable.data import SetCell + # [END bigtable_async_write_simple] + await write_simple(table.client.project, table.instance_id, table.table_id) - row_key = b"my_row" - mutation = SetCell(family="family", qualifier="qualifier", new_value="value") - await table.mutate_row(row_key, mutation) - # [END bigtable_data_set_cell] - -async def bulk_mutate(table): - # [START bigtable_data_bulk_mutate] +async def write_batch(table): + # [START bigtable_async_writes_batch] + from google.cloud.bigtable.data import BigtableDataClientAsync from google.cloud.bigtable.data.mutations import SetCell from google.cloud.bigtable.data.mutations import RowMutationEntry - row_key_1 = b"first_row" - row_key_2 = b"second_row" + async def write_batch(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + family_id = "stats_summary" + + mutation_list = [ + SetCell(family_id, "connected_cell", 1), + SetCell(family_id, "connected_wifi", 1), + SetCell(family_id, "os_build", "PQ2A.190405.003"), + ] + + await table.bulk_mutate_rows( + [ + RowMutationEntry("tablet#a0b81f74#20190501", mutation_list), + RowMutationEntry("tablet#a0b81f74#20190502", mutation_list), + ] + ) + # [END bigtable_async_writes_batch] + await write_batch(table.client.project, table.instance_id, table.table_id) + + +async def write_increment(table): + # [START bigtable_async_write_increment] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule + from google.cloud.bigtable.data import SetCell - common_mutation = SetCell(family="family", qualifier="qualifier", new_value="value") - await table.bulk_mutate_rows( - [ - RowMutationEntry(row_key_1, [common_mutation]), - RowMutationEntry(row_key_2, [common_mutation]), - ] - ) - # [END bigtable_data_bulk_mutate] + async def write_increment(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + family_id = 
"stats_summary" + row_key = "phone#4c410523#20190501" + + # Decrement the connected_wifi value by 1. + increment_rule = IncrementRule( + family_id, "connected_wifi", increment_amount=-1 + ) + result_row = await table.read_modify_write_row(row_key, increment_rule) + # check result + cell = result_row[0] + print(f"{cell.row_key} value: {int(cell)}") + # [END bigtable_async_write_increment] + await write_increment(table.client.project, table.instance_id, table.table_id) + + +async def write_conditional(table): + # [START bigtable_async_writes_conditional] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data import row_filters + from google.cloud.bigtable.data import SetCell + + async def write_conditional(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + family_id = "stats_summary" + row_key = "phone#4c410523#20190501" + + row_filter = row_filters.RowFilterChain( + filters=[ + row_filters.FamilyNameRegexFilter(family_id), + row_filters.ColumnQualifierRegexFilter("os_build"), + row_filters.ValueRegexFilter("PQ2A\\..*"), + ] + ) + + if_true = SetCell(family_id, "os_name", "android") + result = await table.check_and_mutate_row( + row_key, + row_filter, + true_case_mutations=if_true, + false_case_mutations=None, + ) + if result is True: + print("The row os_name was set to android") + # [END bigtable_async_writes_conditional] + await write_conditional(table.client.project, table.instance_id, table.table_id) async def mutations_batcher(table): # [START bigtable_data_mutations_batcher] @@ -138,75 +198,3 @@ async def row_exists(table): print(f"The row {row_key} does not exist") # [END bigtable_data_row_exists] return exists - - -async def read_modify_write_increment(table): - # [START bigtable_data_read_modify_write_increment] - from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule - from google.cloud.bigtable.data import SetCell - - row_key = b"my_row" - family = "family" - qualifier = "qualifier" - - # initialize row with a starting value of 1 - await table.mutate_row(row_key, SetCell(family, qualifier, new_value=1)) - - # use read_modify_write to increment the value by 2 - add_two_rule = IncrementRule(family, qualifier, increment_amount=2) - result = await table.read_modify_write_row(row_key, add_two_rule) - - # check result - cell = result[0] - print(f"{cell.row_key} value: {int(cell)}") - assert int(cell) == 3 - # [END bigtable_data_read_modify_write_increment] - - -async def read_modify_write_append(table): - # [START bigtable_data_read_modify_write_append] - from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule - from google.cloud.bigtable.data import SetCell - - row_key = b"my_row" - family = "family" - qualifier = "qualifier" - - # initialize row with a starting value of "hello" - await table.mutate_row(row_key, SetCell(family, qualifier, new_value="hello")) - - # use read_modify_write to append " world" to the value - append_world_rule = AppendValueRule(family, qualifier, append_value=" world") - result = await table.read_modify_write_row(row_key, append_world_rule) - - # check result - cell = result[0] - print(f"{cell.row_key} value: {cell.value}") - assert cell.value == b"hello world" - # [END bigtable_data_read_modify_append] - - -async def check_and_mutate(table): - # [START bigtable_data_check_and_mutate] - from google.cloud.bigtable.data.row_filters import ValueRangeFilter - 
from google.cloud.bigtable.data import SetCell - - row_key = b"my_row" - family = "family" - qualifier = "qualifier" - - # create a predicate filter to test against - # in this case, use a ValueRangeFilter to check if the value is positive or negative - predicate = ValueRangeFilter(start_value=0, inclusive_start=True) - # use check and mutate to change the value in the row based on the predicate - was_true = await table.check_and_mutate_row( - row_key, - predicate, - true_case_mutations=SetCell(family, qualifier, new_value="positive"), - false_case_mutations=SetCell(family, qualifier, new_value="negative"), - ) - if was_true: - print("The value was positive") - else: - print("The value was negative") - # [END bigtable_data_check_and_mutate] diff --git a/samples/snippets/data_client/data_client_snippets_async_test.py b/samples/snippets/data_client/data_client_snippets_async_test.py index e85f13d47..7bb59cf30 100644 --- a/samples/snippets/data_client/data_client_snippets_async_test.py +++ b/samples/snippets/data_client/data_client_snippets_async_test.py @@ -36,7 +36,7 @@ def table_id(): admin_table = instance.table(table_id) if not admin_table.exists(): - admin_table.create(column_families={"family": None}) + admin_table.create(column_families={"family": None, "stats_summary": None}) yield table_id @@ -55,24 +55,23 @@ async def table(table_id): @pytest.mark.asyncio -async def test_create_table(table_id): - from google.cloud.bigtable.data import TableAsync +async def test_write_simple(table): + await data_snippets.write_simple(table) - result = await data_snippets.create_table(PROJECT, BIGTABLE_INSTANCE, table_id) - assert isinstance(result, TableAsync) - assert result.table_id == table_id - assert result.instance_id == BIGTABLE_INSTANCE - assert result.client.project == PROJECT + +@pytest.mark.asyncio +async def test_write_batch(table): + await data_snippets.write_batch(table) @pytest.mark.asyncio -async def test_set_cell(table): - await data_snippets.set_cell(table) +async def test_write_increment(table): + await data_snippets.write_increment(table) @pytest.mark.asyncio -async def test_bulk_mutate(table): - await data_snippets.bulk_mutate(table) +async def test_write_conditional(table): + await data_snippets.write_conditional(table) @pytest.mark.asyncio @@ -106,18 +105,3 @@ async def test_read_rows_sharded(table): @pytest.mark.asyncio async def test_row_exists(table): await data_snippets.row_exists(table) - - -@pytest.mark.asyncio -async def test_read_modify_write_increment(table): - await data_snippets.read_modify_write_increment(table) - - -@pytest.mark.asyncio -async def test_read_modify_write_append(table): - await data_snippets.read_modify_write_append(table) - - -@pytest.mark.asyncio -async def test_check_and_mutate(table): - await data_snippets.check_and_mutate(table) From 7bcea6d92af444bd2796db790ae3d09c8b83107d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 7 Mar 2024 16:46:04 -0800 Subject: [PATCH 06/24] updated read_rows samples --- .../data_client/data_client_snippets_async.py | 152 +++++++++++------- .../data_client_snippets_async_test.py | 27 ++-- 2 files changed, 106 insertions(+), 73 deletions(-) diff --git a/samples/snippets/data_client/data_client_snippets_async.py b/samples/snippets/data_client/data_client_snippets_async.py index fe75fe6d0..5a0e70991 100644 --- a/samples/snippets/data_client/data_client_snippets_async.py +++ b/samples/snippets/data_client/data_client_snippets_async.py @@ -121,80 +121,116 @@ async def write_conditional(project_id, instance_id, 
table_id): # [END bigtable_async_writes_conditional] await write_conditional(table.client.project, table.instance_id, table.table_id) -async def mutations_batcher(table): - # [START bigtable_data_mutations_batcher] - from google.cloud.bigtable.data.mutations import SetCell - from google.cloud.bigtable.data import RowMutationEntry - common_mutation = SetCell(family="family", qualifier="qualifier", new_value="value") +async def read_row(table): + # [START bigtable_async_reads_row] + from google.cloud.bigtable.data import BigtableDataClientAsync - async with table.mutations_batcher( - flush_limit_mutation_count=2, flush_limit_bytes=1024 - ) as batcher: - for i in range(10): - row_key = f"row-{i}" - batcher.append(RowMutationEntry(row_key, [common_mutation])) - # [END bigtable_data_mutations_batcher] - return batcher + async def read_row(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + row_key = "phone#4c410523#20190501" + row = await table.read_row(row_key) + print(row) + # [END bigtable_async_reads_row] + await read_row(table.client.project, table.instance_id, table.table_id) -async def read_row(table): - # [START bigtable_data_read_row] - row = await table.read_row(b"my_row") - print(row.row_key) - # [END bigtable_data_read_row] - return row +async def read_row_partial(table): + # [START bigtable_async_reads_row_partial] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data import row_filters + + async def read_row_partial(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + row_key = "phone#4c410523#20190501" + col_filter = row_filters.ColumnQualifierRegexFilter(b"os_build") + + row = await table.read_row(row_key, row_filter=col_filter) + print(row) + # [END bigtable_async_reads_row_partial] + await read_row_partial(table.client.project, table.instance_id, table.table_id) -async def read_rows_list(table): - # [START bigtable_data_read_rows_list] +async def read_rows_multiple(table): + # [START bigtable_async_reads_rows] + from google.cloud.bigtable.data import BigtableDataClientAsync from google.cloud.bigtable.data import ReadRowsQuery - from google.cloud.bigtable.data import RowRange - query = ReadRowsQuery(row_ranges=[RowRange("a", "z")]) + async def read_rows(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + + query = ReadRowsQuery(row_keys=[ + b"phone#4c410523#20190501", + b"phone#4c410523#20190502" + ]) + async for row in await table.read_rows_stream(query): + print(row) - row_list = await table.read_rows(query) - for row in row_list: - print(row.row_key) - # [END bigtable_data_read_rows_list] - return row_list + # [END bigtable_async_reads_rows] + await read_rows(table.client.project, table.instance_id, table.table_id) -async def read_rows_stream(table): - # [START bigtable_data_read_rows_stream] +async def read_row_range(table): + # [START bigtable_async_reads_row_range] + from google.cloud.bigtable.data import BigtableDataClientAsync from google.cloud.bigtable.data import ReadRowsQuery from google.cloud.bigtable.data import RowRange - query = ReadRowsQuery(row_ranges=[RowRange("a", "z")]) - async for row in await table.read_rows_stream(query): - print(row.row_key) - # [END 
bigtable_data_read_rows_stream] + async def read_row_range(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + + row_range = RowRange( + start_key=b"phone#4c410523#20190501", + end_key=b"phone#4c410523#201906201" + ) + query = ReadRowsQuery(row_ranges=[row_range]) + async for row in await table.read_rows_stream(query): + print(row) + # [END bigtable_async_reads_row_range] + await read_row_range(table.client.project, table.instance_id, table.table_id) -async def read_rows_sharded(table): - # [START bigtable_data_read_rows_sharded] + +async def read_with_prefix(table): + # [START bigtable_async_reads_prefix] + from google.cloud.bigtable.data import BigtableDataClientAsync from google.cloud.bigtable.data import ReadRowsQuery from google.cloud.bigtable.data import RowRange - # find shard keys for table - table_shard_keys = await table.sample_row_keys() - # construct shared query - query = ReadRowsQuery(row_ranges=[RowRange("a", "z")]) - shard_queries = query.shard(table_shard_keys) - # execute sharded query - row_list = await table.read_rows_sharded(shard_queries) - for row in row_list: - print(row.row_key) - # [END bigtable_data_read_rows_sharded] - - -async def row_exists(table): - # [START bigtable_data_row_exists] - row_key = b"my_row" - exists = await table.row_exists(row_key) - if exists: - print(f"The row {row_key} exists") - else: - print(f"The row {row_key} does not exist") - # [END bigtable_data_row_exists] - return exists + async def read_prefix(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + + prefix = "phone#" + end_key = prefix[:-1] + chr(ord(prefix[-1]) + 1) + prefix_range = RowRange(start_key=prefix, end_key=end_key) + query = ReadRowsQuery(row_ranges=[prefix_range]) + + async for row in await table.read_rows_stream(query): + print(row) + # [END bigtable_async_reads_prefix] + await read_prefix(table.client.project, table.instance_id, table.table_id) + + +async def read_with_filter(table): + # [START bigtable_async_reads_filter] + from google.cloud.bigtable.data import BigtableDataClientAsync + from google.cloud.bigtable.data import ReadRowsQuery + from google.cloud.bigtable.data import row_filters + + async def read_with_filter(project_id, instance_id, table_id): + async with BigtableDataClientAsync(project=project_id) as client: + async with client.get_table(instance_id, table_id) as table: + + row_filter = row_filters.ValueRegexFilter(b"PQ2A.*$") + query = ReadRowsQuery(row_filter=row_filter) + + async for row in await table.read_rows_stream(query): + print(row) + # [END bigtable_async_reads_filter] + await read_with_filter(table.client.project, table.instance_id, table.table_id) + diff --git a/samples/snippets/data_client/data_client_snippets_async_test.py b/samples/snippets/data_client/data_client_snippets_async_test.py index 7bb59cf30..523112743 100644 --- a/samples/snippets/data_client/data_client_snippets_async_test.py +++ b/samples/snippets/data_client/data_client_snippets_async_test.py @@ -75,33 +75,30 @@ async def test_write_conditional(table): @pytest.mark.asyncio -async def test_mutations_batcher(table): - from google.cloud.bigtable.data import MutationsBatcherAsync - - batcher = await data_snippets.mutations_batcher(table) - assert isinstance(batcher, MutationsBatcherAsync) +async def test_read_row(table): + await 
data_snippets.read_row(table)
 
 
 @pytest.mark.asyncio
-async def test_read_row(table):
-    await data_snippets.read_row(table)
+async def test_read_row_partial(table):
+    await data_snippets.read_row_partial(table)
 
 
 @pytest.mark.asyncio
-async def test_read_rows_list(table):
-    await data_snippets.read_rows_list(table)
+async def test_read_rows_multiple(table):
+    await data_snippets.read_rows_multiple(table)
 
 
 @pytest.mark.asyncio
-async def test_read_rows_stream(table):
-    await data_snippets.read_rows_stream(table)
+async def test_read_row_range(table):
+    await data_snippets.read_row_range(table)
 
 
 @pytest.mark.asyncio
-async def test_read_rows_sharded(table):
-    await data_snippets.read_rows_sharded(table)
+async def test_read_with_prefix(table):
+    await data_snippets.read_with_prefix(table)
 
 
 @pytest.mark.asyncio
-async def test_row_exists(table):
-    await data_snippets.row_exists(table)
+async def test_read_with_filter(table):
+    await data_snippets.read_with_filter(table)

From a74a86535dbe3dc59d61b1a0494d3b8f8329dcc1 Mon Sep 17 00:00:00 2001
From: Daniel Sanche
Date: Thu, 7 Mar 2024 17:25:57 -0800
Subject: [PATCH 07/24] added new async hello world sample

---
 samples/hello/async_main.py                   | 137 ++++++++++++++++++
 samples/hello/async_main_test.py              |  39 +++++
 samples/hello/requirements.txt                |   2 +-
 .../data_client_snippets_async_test.py        |   1 -
 4 files changed, 177 insertions(+), 2 deletions(-)
 create mode 100644 samples/hello/async_main.py
 create mode 100644 samples/hello/async_main_test.py

diff --git a/samples/hello/async_main.py b/samples/hello/async_main.py
new file mode 100644
index 000000000..6f72ddf16
--- /dev/null
+++ b/samples/hello/async_main.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python

+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Demonstrates how to connect to Cloud Bigtable and run some basic operations.
+
+Prerequisites:
+
+- Create a Cloud Bigtable cluster.
+  https://cloud.google.com/bigtable/docs/creating-cluster
+- Set your Google Application Default Credentials.
+  https://developers.google.com/identity/protocols/application-default-credentials
+"""
+
+import argparse
+import asyncio
+
+# [START bigtable_async_hw_imports]
+from google.cloud import bigtable
+from google.cloud.bigtable.data import row_filters
+from google.cloud.bigtable.data import RowMutationEntry
+from google.cloud.bigtable.data import SetCell
+from google.cloud.bigtable.data import ReadRowsQuery
+
+# [END bigtable_async_hw_imports]
+
+
+async def main(project_id, instance_id, table_id):
+    # [START bigtable_async_hw_connect]
+    client = bigtable.data.BigtableDataClientAsync(project=project_id)
+    table = client.get_table(instance_id, table_id)
+    # [END bigtable_async_hw_connect]
+
+    # [START bigtable_async_hw_create_table]
+    from google.cloud.bigtable import column_family
+    # the async client only supports the data API. 
Table creation is an admin operation;
+    # use the admin client to create the table
+    print("Creating the {} table.".format(table_id))
+    admin_client = bigtable.Client(project=project_id, admin=True)
+    admin_instance = admin_client.instance(instance_id)
+    admin_table = admin_instance.table(table_id)
+
+    print("Creating column family cf1 with Max Version GC rule...")
+    # Create a column family with GC policy: most recent N versions
+    # Define the GC policy to retain only the most recent 2 versions
+    max_versions_rule = column_family.MaxVersionsGCRule(2)
+    column_family_id = "cf1"
+    column_families = {column_family_id: max_versions_rule}
+    if not admin_table.exists():
+        admin_table.create(column_families=column_families)
+    else:
+        print("Table {} already exists.".format(table_id))
+    # [END bigtable_async_hw_create_table]
+
+    # [START bigtable_async_hw_write_rows]
+    print("Writing some greetings to the table.")
+    greetings = ["Hello World!", "Hello Cloud Bigtable!", "Hello Python!"]
+    mutations = []
+    column = "greeting"
+    for i, value in enumerate(greetings):
+        # Note: This example uses sequential numeric IDs for simplicity,
+        # but this can result in poor performance in a production
+        # application. Since rows are stored in sorted order by key,
+        # sequential keys can result in poor distribution of operations
+        # across nodes.
+        #
+        # For more information about how to design a Bigtable schema for
+        # the best performance, see the documentation:
+        #
+        # https://cloud.google.com/bigtable/docs/schema-design
+        row_key = "greeting{}".format(i).encode()
+        row_mutation = RowMutationEntry(row_key, SetCell(column_family_id, column, value))
+        mutations.append(row_mutation)
+    await table.bulk_mutate_rows(mutations)
+    # [END bigtable_async_hw_write_rows]
+
+    # [START bigtable_async_hw_create_filter]
+    # Create a filter to only retrieve the most recent version of the cell
+    # for each column across the entire row.
+    row_filter = row_filters.CellsColumnLimitFilter(1)
+    # [END bigtable_async_hw_create_filter]
+
+    # [START bigtable_async_hw_get_with_filter]
+    # [START bigtable_async_hw_get_by_key]
+    print("Getting a single greeting by row key.")
+    key = "greeting0".encode()
+
+    row = await table.read_row(key, row_filter=row_filter)
+    cell = row.cells[0]
+    print(cell.value.decode("utf-8"))
+    # [END bigtable_async_hw_get_by_key]
+    # [END bigtable_async_hw_get_with_filter]
+
+    # [START bigtable_async_hw_scan_with_filter]
+    # [START bigtable_async_hw_scan_all]
+    print("Scanning for all greetings:")
+    query = ReadRowsQuery(row_filter=row_filter)
+    async for row in await table.read_rows_stream(query):
+        cell = row.cells[0]
+        print(cell.value.decode("utf-8"))
+    # [END bigtable_async_hw_scan_all]
+    # [END bigtable_async_hw_scan_with_filter]
+
+    # [START bigtable_async_hw_delete_table]
+    # the async client only supports the data API. Table deletion is an admin operation;
+    # use the admin client to delete the table
+    print("Deleting the {} table.".format(table_id))
+    admin_table.delete()
+    # [END bigtable_async_hw_delete_table]
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter
+    )
+    parser.add_argument("project_id", help="Your Cloud Platform project ID.")
+    parser.add_argument(
+        "instance_id", help="ID of the Cloud Bigtable instance to connect to."
+ ) + parser.add_argument( + "--table", help="Table to create and destroy.", default="Hello-Bigtable" + ) + + args = parser.parse_args() + asyncio.run(main(args.project_id, args.instance_id, args.table)) diff --git a/samples/hello/async_main_test.py b/samples/hello/async_main_test.py new file mode 100644 index 000000000..093746bb2 --- /dev/null +++ b/samples/hello/async_main_test.py @@ -0,0 +1,39 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import random +import asyncio + +from async_main import main + +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"] +TABLE_NAME_FORMAT = "hello-world-test-{}" +TABLE_NAME_RANGE = 10000 + + +def test_async_main(capsys): + table_name = TABLE_NAME_FORMAT.format(random.randrange(TABLE_NAME_RANGE)) + + asyncio.run(main(PROJECT, BIGTABLE_INSTANCE, table_name)) + + out, _ = capsys.readouterr() + assert "Creating the {} table.".format(table_name) in out + assert "Writing some greetings to the table." in out + assert "Getting a single greeting by row key." in out + assert "Hello World!" in out + assert "Scanning for all greetings" in out + assert "Hello Cloud Bigtable!" in out + assert "Deleting the {} table.".format(table_name) in out diff --git a/samples/hello/requirements.txt b/samples/hello/requirements.txt index 68419fbcb..dd4fc1fb3 100644 --- a/samples/hello/requirements.txt +++ b/samples/hello/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigtable==2.22.0 +google-cloud-bigtable==2.23.0 google-cloud-core==2.4.1 diff --git a/samples/snippets/data_client/data_client_snippets_async_test.py b/samples/snippets/data_client/data_client_snippets_async_test.py index 523112743..9feeda304 100644 --- a/samples/snippets/data_client/data_client_snippets_async_test.py +++ b/samples/snippets/data_client/data_client_snippets_async_test.py @@ -14,7 +14,6 @@ import pytest_asyncio import uuid import os -import asyncio import data_client_snippets_async as data_snippets From 297ee3f71a77b736394a0629d7065489e0ae3b0c Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Mar 2024 12:18:16 -0700 Subject: [PATCH 08/24] changed copyright date --- samples/hello/async_main.py | 2 +- samples/hello/async_main_test.py | 2 +- samples/snippets/data_client/data_client_snippets_async.py | 2 +- samples/snippets/data_client/data_client_snippets_async_test.py | 2 +- samples/snippets/data_client/noxfile.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/samples/hello/async_main.py b/samples/hello/async_main.py index 6f72ddf16..ba76bbc28 100644 --- a/samples/hello/async_main.py +++ b/samples/hello/async_main.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2016 Google Inc. +# Copyright 2024 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
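[Reviewer note, not part of any patch: the async hello-world flow that PATCH 07/24 adds can be smoke-tested end to end with a condensed script along the following lines. This is a sketch under stated assumptions: it presumes an existing Bigtable instance and a table that already has a "cf1" column family, all IDs below are placeholders, and it uses only the async data-API calls that appear elsewhere in this series (BigtableDataClientAsync, get_table, mutate_row/SetCell, read_rows_stream/ReadRowsQuery).

    import asyncio

    from google.cloud.bigtable.data import BigtableDataClientAsync
    from google.cloud.bigtable.data import ReadRowsQuery
    from google.cloud.bigtable.data import SetCell


    async def smoke_test():
        # Placeholder IDs; substitute a real project, instance, and table.
        async with BigtableDataClientAsync(project="my-project") as client:
            async with client.get_table("my-instance", "my-table") as table:
                # Write one cell, mirroring the sample's write path
                # (assumes the "cf1" column family already exists).
                mutation = SetCell(
                    family="cf1", qualifier="greeting", new_value="Hello World!"
                )
                await table.mutate_row(b"greeting0", mutation)
                # Stream it back, mirroring the sample's scan path.
                query = ReadRowsQuery(limit=1)
                async for row in await table.read_rows_stream(query):
                    print(row.row_key, row.cells[0].value.decode("utf-8"))


    if __name__ == "__main__":
        asyncio.run(smoke_test())

End of reviewer note.]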
diff --git a/samples/hello/async_main_test.py b/samples/hello/async_main_test.py index 093746bb2..a47ac2d33 100644 --- a/samples/hello/async_main_test.py +++ b/samples/hello/async_main_test.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2024 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/snippets/data_client/data_client_snippets_async.py b/samples/snippets/data_client/data_client_snippets_async.py index 5a0e70991..1e4894f8b 100644 --- a/samples/snippets/data_client/data_client_snippets_async.py +++ b/samples/snippets/data_client/data_client_snippets_async.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2023, Google LLC +# Copyright 2024, Google LLC # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/samples/snippets/data_client/data_client_snippets_async_test.py b/samples/snippets/data_client/data_client_snippets_async_test.py index 9feeda304..d9968e6dc 100644 --- a/samples/snippets/data_client/data_client_snippets_async_test.py +++ b/samples/snippets/data_client/data_client_snippets_async_test.py @@ -1,4 +1,4 @@ -# Copyright 2023, Google LLC +# Copyright 2024, Google LLC # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/samples/snippets/data_client/noxfile.py b/samples/snippets/data_client/noxfile.py index 3b7135946..6967925a8 100644 --- a/samples/snippets/data_client/noxfile.py +++ b/samples/snippets/data_client/noxfile.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From ec0141f40d6ebc998ff3bbfb872f15774b70c0a4 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Mar 2024 12:20:19 -0700 Subject: [PATCH 09/24] updated docstrings --- samples/hello/async_main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/hello/async_main.py b/samples/hello/async_main.py index ba76bbc28..eae909a25 100644 --- a/samples/hello/async_main.py +++ b/samples/hello/async_main.py @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Demonstrates how to connect to Cloud Bigtable and run some basic operations. +"""Demonstrates how to connect to Cloud Bigtable and run some basic operations with the async APIs Prerequisites: From 2f4d82bf83bc9dc829213260cc91320a6e661c6f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Mar 2024 12:21:17 -0700 Subject: [PATCH 10/24] ran blacken --- samples/hello/async_main.py | 5 ++++- samples/hello/noxfile.py | 15 ++++++++------- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/samples/hello/async_main.py b/samples/hello/async_main.py index eae909a25..330eebbc8 100644 --- a/samples/hello/async_main.py +++ b/samples/hello/async_main.py @@ -45,6 +45,7 @@ async def main(project_id, instance_id, table_id): # [START bigtable_async_hw_create_table] from google.cloud.bigtable import column_family + # the async client only supports the data API. 
Table creation as an admin operation # use admin client to create the table print("Creating the {} table.".format(table_id)) @@ -81,7 +82,9 @@ async def main(project_id, instance_id, table_id): # # https://cloud.google.com/bigtable/docs/schema-design row_key = "greeting{}".format(i).encode() - row_mutation = RowMutationEntry(row_key, SetCell(column_family_id, column, value)) + row_mutation = RowMutationEntry( + row_key, SetCell(column_family_id, column, value) + ) mutations.append(row_mutation) await table.bulk_mutate_rows(mutations) # [END bigtable_hw_write_rows] diff --git a/samples/hello/noxfile.py b/samples/hello/noxfile.py index 483b55901..3b7135946 100644 --- a/samples/hello/noxfile.py +++ b/samples/hello/noxfile.py @@ -160,6 +160,7 @@ def blacken(session: nox.sessions.Session) -> None: # format = isort + black # + @nox.session def format(session: nox.sessions.Session) -> None: """ @@ -187,7 +188,9 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: @@ -209,9 +212,7 @@ def _session_tests( if os.path.exists("requirements-test.txt"): if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") else: session.install("-r", "requirements-test.txt") with open("requirements-test.txt") as rtfile: @@ -224,9 +225,9 @@ def _session_tests( post_install(session) if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) + concurrent_args.extend(["-n", "auto"]) session.run( "pytest", @@ -256,7 +257,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): From 773065afa0a1ff2c0a4e978ed0c5f3408bc7ae39 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Mar 2024 12:22:40 -0700 Subject: [PATCH 11/24] updated cluster to instance in docstring --- samples/hello/async_main.py | 4 ++-- samples/hello/main.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/samples/hello/async_main.py b/samples/hello/async_main.py index 330eebbc8..5e382e449 100644 --- a/samples/hello/async_main.py +++ b/samples/hello/async_main.py @@ -18,8 +18,8 @@ Prerequisites: -- Create a Cloud Bigtable cluster. - https://cloud.google.com/bigtable/docs/creating-cluster +- Create a Cloud Bigtable instance. + https://cloud.google.com/bigtable/docs/creating-instance - Set your Google Application Default Credentials. https://developers.google.com/identity/protocols/application-default-credentials """ diff --git a/samples/hello/main.py b/samples/hello/main.py index 5e47b4a38..3b7de34b0 100644 --- a/samples/hello/main.py +++ b/samples/hello/main.py @@ -18,8 +18,8 @@ Prerequisites: -- Create a Cloud Bigtable cluster. 
- https://cloud.google.com/bigtable/docs/creating-cluster +- Create a Cloud Bigtable instance. + https://cloud.google.com/bigtable/docs/creating-instance - Set your Google Application Default Credentials. https://developers.google.com/identity/protocols/application-default-credentials """ From 6dce5090207bfcaff5bf336baa15b34ff3fcc57c Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Mar 2024 12:42:54 -0700 Subject: [PATCH 12/24] use batchers in bulk write samples --- .../data_client/data_client_snippets_async.py | 24 ++++++++------- samples/snippets/writes/write_batch.py | 29 ++++++++++--------- 2 files changed, 28 insertions(+), 25 deletions(-) diff --git a/samples/snippets/data_client/data_client_snippets_async.py b/samples/snippets/data_client/data_client_snippets_async.py index 1e4894f8b..6328c0c07 100644 --- a/samples/snippets/data_client/data_client_snippets_async.py +++ b/samples/snippets/data_client/data_client_snippets_async.py @@ -48,18 +48,20 @@ async def write_batch(project_id, instance_id, table_id): async with client.get_table(instance_id, table_id) as table: family_id = "stats_summary" - mutation_list = [ - SetCell(family_id, "connected_cell", 1), - SetCell(family_id, "connected_wifi", 1), - SetCell(family_id, "os_build", "PQ2A.190405.003"), - ] - - await table.bulk_mutate_rows( - [ - RowMutationEntry("tablet#a0b81f74#20190501", mutation_list), - RowMutationEntry("tablet#a0b81f74#20190502", mutation_list), + async with table.mutations_batcher( + flush_limit_mutation_count=2, flush_limit_bytes=1024 + ) as batcher: + mutation_list = [ + SetCell(family_id, "connected_cell", 1), + SetCell(family_id, "connected_wifi", 1), + SetCell(family_id, "os_build", "12155.0.0-rc1"), ] - ) + batcher.append( + RowMutationEntry("tablet#a0b81f74#20190501", mutation_list) + ) + batcher.append( + RowMutationEntry("tablet#a0b81f74#20190502", mutation_list) + ) # [END bigtable_async_writes_batch] await write_batch(table.client.project, table.instance_id, table.table_id) diff --git a/samples/snippets/writes/write_batch.py b/samples/snippets/writes/write_batch.py index fd5117242..75ce78312 100644 --- a/samples/snippets/writes/write_batch.py +++ b/samples/snippets/writes/write_batch.py @@ -16,6 +16,7 @@ import datetime from google.cloud import bigtable +from google.cloud.bigtable.batcher import MutationsBatcher def write_batch(project_id, instance_id, table_id): @@ -23,23 +24,23 @@ def write_batch(project_id, instance_id, table_id): instance = client.instance(instance_id) table = instance.table(table_id) - timestamp = datetime.datetime.utcnow() - column_family_id = "stats_summary" + with MutationsBatcher( + table=table, flush_count=2, max_row_bytes=1024 + ) as batcher: + timestamp = datetime.datetime.utcnow() + column_family_id = "stats_summary" - rows = [ - table.direct_row("tablet#a0b81f74#20190501"), - table.direct_row("tablet#a0b81f74#20190502"), - ] + rows = [ + table.direct_row("tablet#a0b81f74#20190501"), + table.direct_row("tablet#a0b81f74#20190502"), + ] - rows[0].set_cell(column_family_id, "connected_wifi", 1, timestamp) - rows[0].set_cell(column_family_id, "os_build", "12155.0.0-rc1", timestamp) - rows[1].set_cell(column_family_id, "connected_wifi", 1, timestamp) - rows[1].set_cell(column_family_id, "os_build", "12145.0.0-rc6", timestamp) + rows[0].set_cell(column_family_id, "connected_wifi", 1, timestamp) + rows[0].set_cell(column_family_id, "os_build", "12155.0.0-rc1", timestamp) + rows[1].set_cell(column_family_id, "connected_wifi", 1, timestamp) + 
rows[1].set_cell(column_family_id, "os_build", "12145.0.0-rc6", timestamp) - response = table.mutate_rows(rows) - for i, status in enumerate(response): - if status.code != 0: - print("Error writing row: {}".format(status.message)) + batcher.mutate_rows(rows) print("Successfully wrote 2 rows.") From 87336039abe8a8096b19739d55d20a140a72ae15 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Mar 2024 12:52:26 -0700 Subject: [PATCH 13/24] fixed lint --- samples/snippets/data_client/data_client_snippets_async.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/samples/snippets/data_client/data_client_snippets_async.py b/samples/snippets/data_client/data_client_snippets_async.py index 6328c0c07..b3d648e5c 100644 --- a/samples/snippets/data_client/data_client_snippets_async.py +++ b/samples/snippets/data_client/data_client_snippets_async.py @@ -70,7 +70,6 @@ async def write_increment(table): # [START bigtable_async_write_increment] from google.cloud.bigtable.data import BigtableDataClientAsync from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule - from google.cloud.bigtable.data import SetCell async def write_increment(project_id, instance_id, table_id): async with BigtableDataClientAsync(project=project_id) as client: @@ -235,4 +234,3 @@ async def read_with_filter(project_id, instance_id, table_id): print(row) # [END bigtable_async_reads_filter] await read_with_filter(table.client.project, table.instance_id, table.table_id) - From 668191f0dde92f474fa09474f11e53ad85bfcb29 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Mar 2024 12:58:53 -0700 Subject: [PATCH 14/24] fixed end region tags --- samples/hello/async_main.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/samples/hello/async_main.py b/samples/hello/async_main.py index 5e382e449..d608bb073 100644 --- a/samples/hello/async_main.py +++ b/samples/hello/async_main.py @@ -34,14 +34,14 @@ from google.cloud.bigtable.data import SetCell from google.cloud.bigtable.data import ReadRowsQuery -# [END bigtable_hw_imports] +# [END bigtable_async_hw_imports] async def main(project_id, instance_id, table_id): # [START bigtable_async_hw_connect] client = bigtable.data.BigtableDataClientAsync(project=project_id) table = client.get_table(instance_id, table_id) - # [END bigtable_hw_connect] + # [END bigtable_async_hw_connect] # [START bigtable_async_hw_create_table] from google.cloud.bigtable import column_family @@ -63,7 +63,7 @@ async def main(project_id, instance_id, table_id): admin_table.create(column_families=column_families) else: print("Table {} already exists.".format(table_id)) - # [END bigtable_hw_create_table] + # [END bigtable_async_hw_create_table] # [START bigtable_async_hw_write_rows] print("Writing some greetings to the table.") @@ -87,13 +87,13 @@ async def main(project_id, instance_id, table_id): ) mutations.append(row_mutation) await table.bulk_mutate_rows(mutations) - # [END bigtable_hw_write_rows] + # [END bigtable_async_hw_write_rows] # [START bigtable_async_hw_create_filter] # Create a filter to only retrieve the most recent version of the cell # for each column across entire row. 
     row_filter = row_filters.CellsColumnLimitFilter(1)
-    # [END bigtable_hw_create_filter]
+    # [END bigtable_async_hw_create_filter]
 
     # [START bigtable_async_hw_get_with_filter]
     # [START bigtable_async_hw_get_by_key]
@@ -103,8 +103,8 @@ async def main(project_id, instance_id, table_id):
     row = await table.read_row(key, row_filter=row_filter)
     cell = row.cells[0]
     print(cell.value.decode("utf-8"))
-    # [END bigtable_hw_get_by_key]
-    # [END bigtable_hw_get_with_filter]
+    # [END bigtable_async_hw_get_by_key]
+    # [END bigtable_async_hw_get_with_filter]
 
     # [START bigtable_async_hw_scan_with_filter]
     # [START bigtable_async_hw_scan_all]
@@ -113,15 +113,15 @@ async def main(project_id, instance_id, table_id):
     async for row in await table.read_rows_stream(query):
         cell = row.cells[0]
         print(cell.value.decode("utf-8"))
-    # [END bigtable_hw_scan_all]
-    # [END bigtable_hw_scan_with_filter]
+    # [END bigtable_async_hw_scan_all]
+    # [END bigtable_async_hw_scan_with_filter]
 
     # [START bigtable_async_hw_delete_table]
     # the async client only supports the data API. Table deletion as an admin operation
     # use admin client to delete the table
     print("Deleting the {} table.".format(table_id))
     admin_table.delete()
-    # [END bigtable_hw_delete_table]
+    # [END bigtable_async_hw_delete_table]
 
 
 if __name__ == "__main__":

From ac0d4a8820164fbd5d8becaf9632aab2a3672ead Mon Sep 17 00:00:00 2001
From: Daniel Sanche
Date: Fri, 15 Mar 2024 13:12:35 -0700
Subject: [PATCH 15/24] downgrade pytest version for py37

---
 samples/beam/requirements-test.txt                 | 2 +-
 samples/hello/requirements-test.txt                | 2 +-
 samples/hello_happybase/requirements-test.txt      | 2 +-
 samples/instanceadmin/requirements-test.txt        | 2 +-
 samples/metricscaler/requirements-test.txt         | 2 +-
 samples/quickstart/requirements-test.txt           | 2 +-
 samples/quickstart_happybase/requirements-test.txt | 2 +-
 samples/snippets/data_client/requirements-test.txt | 2 +-
 samples/snippets/deletes/requirements-test.txt     | 2 +-
 samples/snippets/filters/requirements-test.txt     | 2 +-
 samples/snippets/reads/requirements-test.txt       | 2 +-
 samples/snippets/writes/requirements-test.txt      | 2 +-
 samples/tableadmin/requirements-test.txt           | 2 +-
 13 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/samples/beam/requirements-test.txt b/samples/beam/requirements-test.txt
index 8075a1ec5..cb87efc0f 100644
--- a/samples/beam/requirements-test.txt
+++ b/samples/beam/requirements-test.txt
@@ -1 +1 @@
-pytest==8.0.0
+pytest==7.4.4
diff --git a/samples/hello/requirements-test.txt b/samples/hello/requirements-test.txt
index 8075a1ec5..cb87efc0f 100644
--- a/samples/hello/requirements-test.txt
+++ b/samples/hello/requirements-test.txt
@@ -1 +1 @@
-pytest==8.0.0
+pytest==7.4.4
diff --git a/samples/hello_happybase/requirements-test.txt b/samples/hello_happybase/requirements-test.txt
index 8075a1ec5..cb87efc0f 100644
--- a/samples/hello_happybase/requirements-test.txt
+++ b/samples/hello_happybase/requirements-test.txt
@@ -1 +1 @@
-pytest==8.0.0
+pytest==7.4.4
diff --git a/samples/instanceadmin/requirements-test.txt b/samples/instanceadmin/requirements-test.txt
index 8075a1ec5..cb87efc0f 100644
--- a/samples/instanceadmin/requirements-test.txt
+++ b/samples/instanceadmin/requirements-test.txt
@@ -1 +1 @@
-pytest==8.0.0
+pytest==7.4.4
diff --git a/samples/metricscaler/requirements-test.txt b/samples/metricscaler/requirements-test.txt
index 8b8270b6c..c0d4f7003 100644
--- a/samples/metricscaler/requirements-test.txt
+++ b/samples/metricscaler/requirements-test.txt
@@ -1,3 +1,3 @@
-pytest==8.0.0
+pytest==7.4.4
 mock==5.1.0
google-cloud-testutils diff --git a/samples/quickstart/requirements-test.txt b/samples/quickstart/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/quickstart/requirements-test.txt +++ b/samples/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/quickstart_happybase/requirements-test.txt b/samples/quickstart_happybase/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/quickstart_happybase/requirements-test.txt +++ b/samples/quickstart_happybase/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/data_client/requirements-test.txt b/samples/snippets/data_client/requirements-test.txt index bbeed6cc5..5cb431d92 100644 --- a/samples/snippets/data_client/requirements-test.txt +++ b/samples/snippets/data_client/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==8.0.0 +pytest==7.4.4 pytest-asyncio diff --git a/samples/snippets/deletes/requirements-test.txt b/samples/snippets/deletes/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/snippets/deletes/requirements-test.txt +++ b/samples/snippets/deletes/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/filters/requirements-test.txt b/samples/snippets/filters/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/snippets/filters/requirements-test.txt +++ b/samples/snippets/filters/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/reads/requirements-test.txt b/samples/snippets/reads/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/snippets/reads/requirements-test.txt +++ b/samples/snippets/reads/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/writes/requirements-test.txt b/samples/snippets/writes/requirements-test.txt index aaa563abc..43b02e724 100644 --- a/samples/snippets/writes/requirements-test.txt +++ b/samples/snippets/writes/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/tableadmin/requirements-test.txt b/samples/tableadmin/requirements-test.txt index b4d30f505..aa143f59d 100644 --- a/samples/tableadmin/requirements-test.txt +++ b/samples/tableadmin/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==8.0.0 +pytest==7.4.4 google-cloud-testutils==1.4.0 From 11548bb72d1574afb4905cedd5b2420ea7040a3f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Mar 2024 13:20:23 -0700 Subject: [PATCH 16/24] removed batcher args --- samples/snippets/data_client/data_client_snippets_async.py | 4 +--- samples/snippets/writes/write_batch.py | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/samples/snippets/data_client/data_client_snippets_async.py b/samples/snippets/data_client/data_client_snippets_async.py index b3d648e5c..cb51bdc78 100644 --- a/samples/snippets/data_client/data_client_snippets_async.py +++ b/samples/snippets/data_client/data_client_snippets_async.py @@ -48,9 +48,7 @@ async def write_batch(project_id, instance_id, table_id): async with client.get_table(instance_id, table_id) as table: family_id = "stats_summary" - async with table.mutations_batcher( - flush_limit_mutation_count=2, flush_limit_bytes=1024 - ) as batcher: + async with table.mutations_batcher() as batcher: mutation_list = [ SetCell(family_id, "connected_cell", 1), SetCell(family_id, "connected_wifi", 1), diff --git a/samples/snippets/writes/write_batch.py b/samples/snippets/writes/write_batch.py 
index 75ce78312..8ad4b07a5 100644 --- a/samples/snippets/writes/write_batch.py +++ b/samples/snippets/writes/write_batch.py @@ -24,9 +24,7 @@ def write_batch(project_id, instance_id, table_id): instance = client.instance(instance_id) table = instance.table(table_id) - with MutationsBatcher( - table=table, flush_count=2, max_row_bytes=1024 - ) as batcher: + with MutationsBatcher(table=table) as batcher: timestamp = datetime.datetime.utcnow() column_family_id = "stats_summary" From 08f6de7a47aacd85f76f0c4bcd67f65e56336220 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Mar 2024 13:31:37 -0700 Subject: [PATCH 17/24] Revert "downgrade pytest version for py37" This reverts commit ac0d4a8820164fbd5d8becaf9632aab2a3672ead. --- samples/beam/requirements-test.txt | 2 +- samples/hello/requirements-test.txt | 2 +- samples/hello_happybase/requirements-test.txt | 2 +- samples/instanceadmin/requirements-test.txt | 2 +- samples/metricscaler/requirements-test.txt | 2 +- samples/quickstart/requirements-test.txt | 2 +- samples/quickstart_happybase/requirements-test.txt | 2 +- samples/snippets/data_client/requirements-test.txt | 2 +- samples/snippets/deletes/requirements-test.txt | 2 +- samples/snippets/filters/requirements-test.txt | 2 +- samples/snippets/reads/requirements-test.txt | 2 +- samples/snippets/writes/requirements-test.txt | 2 +- samples/tableadmin/requirements-test.txt | 2 +- 13 files changed, 13 insertions(+), 13 deletions(-) diff --git a/samples/beam/requirements-test.txt b/samples/beam/requirements-test.txt index cb87efc0f..8075a1ec5 100644 --- a/samples/beam/requirements-test.txt +++ b/samples/beam/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.4 +pytest==8.0.0 diff --git a/samples/hello/requirements-test.txt b/samples/hello/requirements-test.txt index cb87efc0f..8075a1ec5 100644 --- a/samples/hello/requirements-test.txt +++ b/samples/hello/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.4 +pytest==8.0.0 diff --git a/samples/hello_happybase/requirements-test.txt b/samples/hello_happybase/requirements-test.txt index cb87efc0f..8075a1ec5 100644 --- a/samples/hello_happybase/requirements-test.txt +++ b/samples/hello_happybase/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.4 +pytest==8.0.0 diff --git a/samples/instanceadmin/requirements-test.txt b/samples/instanceadmin/requirements-test.txt index cb87efc0f..8075a1ec5 100644 --- a/samples/instanceadmin/requirements-test.txt +++ b/samples/instanceadmin/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.4 +pytest==8.0.0 diff --git a/samples/metricscaler/requirements-test.txt b/samples/metricscaler/requirements-test.txt index c0d4f7003..8b8270b6c 100644 --- a/samples/metricscaler/requirements-test.txt +++ b/samples/metricscaler/requirements-test.txt @@ -1,3 +1,3 @@ -pytest==7.4.4 +pytest==8.0.0 mock==5.1.0 google-cloud-testutils diff --git a/samples/quickstart/requirements-test.txt b/samples/quickstart/requirements-test.txt index cb87efc0f..8075a1ec5 100644 --- a/samples/quickstart/requirements-test.txt +++ b/samples/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.4 +pytest==8.0.0 diff --git a/samples/quickstart_happybase/requirements-test.txt b/samples/quickstart_happybase/requirements-test.txt index cb87efc0f..8075a1ec5 100644 --- a/samples/quickstart_happybase/requirements-test.txt +++ b/samples/quickstart_happybase/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.4 +pytest==8.0.0 diff --git a/samples/snippets/data_client/requirements-test.txt b/samples/snippets/data_client/requirements-test.txt index 5cb431d92..bbeed6cc5 100644 
--- a/samples/snippets/data_client/requirements-test.txt +++ b/samples/snippets/data_client/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.4.4 +pytest==8.0.0 pytest-asyncio diff --git a/samples/snippets/deletes/requirements-test.txt b/samples/snippets/deletes/requirements-test.txt index cb87efc0f..8075a1ec5 100644 --- a/samples/snippets/deletes/requirements-test.txt +++ b/samples/snippets/deletes/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.4 +pytest==8.0.0 diff --git a/samples/snippets/filters/requirements-test.txt b/samples/snippets/filters/requirements-test.txt index cb87efc0f..8075a1ec5 100644 --- a/samples/snippets/filters/requirements-test.txt +++ b/samples/snippets/filters/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.4 +pytest==8.0.0 diff --git a/samples/snippets/reads/requirements-test.txt b/samples/snippets/reads/requirements-test.txt index cb87efc0f..8075a1ec5 100644 --- a/samples/snippets/reads/requirements-test.txt +++ b/samples/snippets/reads/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.4 +pytest==8.0.0 diff --git a/samples/snippets/writes/requirements-test.txt b/samples/snippets/writes/requirements-test.txt index 43b02e724..aaa563abc 100644 --- a/samples/snippets/writes/requirements-test.txt +++ b/samples/snippets/writes/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==7.4.4 +pytest==8.0.0 diff --git a/samples/tableadmin/requirements-test.txt b/samples/tableadmin/requirements-test.txt index aa143f59d..b4d30f505 100644 --- a/samples/tableadmin/requirements-test.txt +++ b/samples/tableadmin/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.4.4 +pytest==8.0.0 google-cloud-testutils==1.4.0 From 68a1390f1a3210eca11bc6fb965e37c46ff2d354 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Mar 2024 14:40:19 -0700 Subject: [PATCH 18/24] Reapply "downgrade pytest version for py37" This reverts commit 08f6de7a47aacd85f76f0c4bcd67f65e56336220. 
--- samples/beam/requirements-test.txt | 2 +- samples/hello/requirements-test.txt | 2 +- samples/hello_happybase/requirements-test.txt | 2 +- samples/instanceadmin/requirements-test.txt | 2 +- samples/metricscaler/requirements-test.txt | 2 +- samples/quickstart/requirements-test.txt | 2 +- samples/quickstart_happybase/requirements-test.txt | 2 +- samples/snippets/data_client/requirements-test.txt | 2 +- samples/snippets/deletes/requirements-test.txt | 2 +- samples/snippets/filters/requirements-test.txt | 2 +- samples/snippets/reads/requirements-test.txt | 2 +- samples/snippets/writes/requirements-test.txt | 2 +- samples/tableadmin/requirements-test.txt | 2 +- 13 files changed, 13 insertions(+), 13 deletions(-) diff --git a/samples/beam/requirements-test.txt b/samples/beam/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/beam/requirements-test.txt +++ b/samples/beam/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/hello/requirements-test.txt b/samples/hello/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/hello/requirements-test.txt +++ b/samples/hello/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/hello_happybase/requirements-test.txt b/samples/hello_happybase/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/hello_happybase/requirements-test.txt +++ b/samples/hello_happybase/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/instanceadmin/requirements-test.txt b/samples/instanceadmin/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/instanceadmin/requirements-test.txt +++ b/samples/instanceadmin/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/metricscaler/requirements-test.txt b/samples/metricscaler/requirements-test.txt index 8b8270b6c..c0d4f7003 100644 --- a/samples/metricscaler/requirements-test.txt +++ b/samples/metricscaler/requirements-test.txt @@ -1,3 +1,3 @@ -pytest==8.0.0 +pytest==7.4.4 mock==5.1.0 google-cloud-testutils diff --git a/samples/quickstart/requirements-test.txt b/samples/quickstart/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/quickstart/requirements-test.txt +++ b/samples/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/quickstart_happybase/requirements-test.txt b/samples/quickstart_happybase/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/quickstart_happybase/requirements-test.txt +++ b/samples/quickstart_happybase/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/data_client/requirements-test.txt b/samples/snippets/data_client/requirements-test.txt index bbeed6cc5..5cb431d92 100644 --- a/samples/snippets/data_client/requirements-test.txt +++ b/samples/snippets/data_client/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==8.0.0 +pytest==7.4.4 pytest-asyncio diff --git a/samples/snippets/deletes/requirements-test.txt b/samples/snippets/deletes/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/snippets/deletes/requirements-test.txt +++ b/samples/snippets/deletes/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/filters/requirements-test.txt b/samples/snippets/filters/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/snippets/filters/requirements-test.txt +++ b/samples/snippets/filters/requirements-test.txt @@ -1 +1 @@ 
-pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/reads/requirements-test.txt b/samples/snippets/reads/requirements-test.txt index 8075a1ec5..cb87efc0f 100644 --- a/samples/snippets/reads/requirements-test.txt +++ b/samples/snippets/reads/requirements-test.txt @@ -1 +1 @@ -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/snippets/writes/requirements-test.txt b/samples/snippets/writes/requirements-test.txt index aaa563abc..43b02e724 100644 --- a/samples/snippets/writes/requirements-test.txt +++ b/samples/snippets/writes/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==8.0.0 +pytest==7.4.4 diff --git a/samples/tableadmin/requirements-test.txt b/samples/tableadmin/requirements-test.txt index b4d30f505..aa143f59d 100644 --- a/samples/tableadmin/requirements-test.txt +++ b/samples/tableadmin/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==8.0.0 +pytest==7.4.4 google-cloud-testutils==1.4.0 From 9f6f26eb5246cce5bc50a2f5fadbee4223b024d8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 18 Mar 2024 10:25:11 -0700 Subject: [PATCH 19/24] reverted beam version --- samples/beam/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/beam/requirements.txt b/samples/beam/requirements.txt index 70b1371ae..813fc8d2b 100644 --- a/samples/beam/requirements.txt +++ b/samples/beam/requirements.txt @@ -1,3 +1,3 @@ -apache-beam==2.53.0 +apache-beam==2.52.0 google-cloud-bigtable==2.22.0 google-cloud-core==2.4.1 From b650df0ce73f70bf2178d073e648e2b8c81760a6 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 18 Mar 2024 11:28:39 -0700 Subject: [PATCH 20/24] removed broken 3.12 dependency --- samples/snippets/deletes/deletes_test.py | 37 ++++---- samples/snippets/deletes/requirements.txt | 1 - .../snippets/deletes/snapshots/__init__.py | 0 .../deletes/snapshots/snap_deletes_test.py | 24 ----- samples/snippets/filters/filters_test.py | 92 +++++++++++-------- samples/snippets/filters/requirements.txt | 1 - .../filters/snapshots/snap_filters_test.py | 42 ++++----- samples/snippets/reads/reads_test.py | 39 +++++--- samples/snippets/reads/requirements.txt | 1 - .../reads/snapshots/snap_reads_test.py | 23 +++-- 10 files changed, 130 insertions(+), 130 deletions(-) delete mode 100644 samples/snippets/deletes/snapshots/__init__.py delete mode 100644 samples/snippets/deletes/snapshots/snap_deletes_test.py diff --git a/samples/snippets/deletes/deletes_test.py b/samples/snippets/deletes/deletes_test.py index bf23daa59..bebaabafb 100644 --- a/samples/snippets/deletes/deletes_test.py +++ b/samples/snippets/deletes/deletes_test.py @@ -1,4 +1,5 @@ # Copyright 2020, Google LLC + # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at @@ -94,46 +95,46 @@ def table_id(): yield table_id -def assert_snapshot_match(capsys, snapshot): +def assert_output_match(capsys, expected): out, _ = capsys.readouterr() - snapshot.assert_match(out) + assert out == expected -def test_delete_from_column(capsys, snapshot, table_id): +def test_delete_from_column(capsys, table_id): deletes_snippets.delete_from_column(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_delete_from_column_family(capsys, snapshot, table_id): +def test_delete_from_column_family(capsys, table_id): deletes_snippets.delete_from_column_family(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_delete_from_row(capsys, snapshot, table_id): +def test_delete_from_row(capsys, table_id): deletes_snippets.delete_from_row(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_streaming_and_batching(capsys, snapshot, table_id): +def test_streaming_and_batching(capsys, table_id): deletes_snippets.streaming_and_batching(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_check_and_mutate(capsys, snapshot, table_id): +def test_check_and_mutate(capsys, table_id): deletes_snippets.check_and_mutate(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_drop_row_range(capsys, snapshot, table_id): +def test_drop_row_range(capsys, table_id): deletes_snippets.drop_row_range(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_delete_column_family(capsys, snapshot, table_id): +def test_delete_column_family(capsys, table_id): deletes_snippets.delete_column_family(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") -def test_delete_table(capsys, snapshot, table_id): +def test_delete_table(capsys, table_id): deletes_snippets.delete_table(PROJECT, BIGTABLE_INSTANCE, table_id) - assert_snapshot_match(capsys, snapshot) + assert_output_match(capsys, "") diff --git a/samples/snippets/deletes/requirements.txt b/samples/snippets/deletes/requirements.txt index ae10593d2..6dc985893 100644 --- a/samples/snippets/deletes/requirements.txt +++ b/samples/snippets/deletes/requirements.txt @@ -1,2 +1 @@ google-cloud-bigtable==2.22.0 -snapshottest==0.6.0 \ No newline at end of file diff --git a/samples/snippets/deletes/snapshots/__init__.py b/samples/snippets/deletes/snapshots/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/samples/snippets/deletes/snapshots/snap_deletes_test.py b/samples/snippets/deletes/snapshots/snap_deletes_test.py deleted file mode 100644 index 04a7db940..000000000 --- a/samples/snippets/deletes/snapshots/snap_deletes_test.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# snapshottest: v1 - https://goo.gl/zC4yUc -from __future__ import unicode_literals - -from snapshottest import Snapshot - - -snapshots = Snapshot() - -snapshots['test_check_and_mutate 1'] = '' - -snapshots['test_delete_column_family 1'] = '' - -snapshots['test_delete_from_column 1'] = '' - -snapshots['test_delete_from_column_family 1'] = '' - -snapshots['test_delete_from_row 1'] = '' - -snapshots['test_delete_table 1'] = '' - -snapshots['test_drop_row_range 
1'] = '' - -snapshots['test_streaming_and_batching 1'] = '' diff --git a/samples/snippets/filters/filters_test.py b/samples/snippets/filters/filters_test.py index 35cf62ff0..7846669c5 100644 --- a/samples/snippets/filters/filters_test.py +++ b/samples/snippets/filters/filters_test.py @@ -16,11 +16,13 @@ import os import time import uuid +import inspect from google.cloud import bigtable import pytest +from .snapshots.snap_filters_test import snapshots -import filter_snippets +from . import filter_snippets PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] @@ -97,131 +99,147 @@ def table_id(): table.delete() -def test_filter_limit_row_sample(capsys, snapshot, table_id): +def test_filter_limit_row_sample(capsys, table_id): filter_snippets.filter_limit_row_sample(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() assert "Reading data for" in out -def test_filter_limit_row_regex(capsys, snapshot, table_id): +def test_filter_limit_row_regex(capsys, table_id): filter_snippets.filter_limit_row_regex(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected - -def test_filter_limit_cells_per_col(capsys, snapshot, table_id): +def test_filter_limit_cells_per_col(capsys, table_id): filter_snippets.filter_limit_cells_per_col(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_cells_per_row(capsys, snapshot, table_id): +def test_filter_limit_cells_per_row(capsys, table_id): filter_snippets.filter_limit_cells_per_row(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_cells_per_row_offset(capsys, snapshot, table_id): +def test_filter_limit_cells_per_row_offset(capsys, table_id): filter_snippets.filter_limit_cells_per_row_offset( PROJECT, BIGTABLE_INSTANCE, table_id ) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_col_family_regex(capsys, snapshot, table_id): +def test_filter_limit_col_family_regex(capsys, table_id): filter_snippets.filter_limit_col_family_regex(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_col_qualifier_regex(capsys, snapshot, table_id): +def test_filter_limit_col_qualifier_regex(capsys, table_id): filter_snippets.filter_limit_col_qualifier_regex( PROJECT, BIGTABLE_INSTANCE, table_id ) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_col_range(capsys, snapshot, table_id): +def test_filter_limit_col_range(capsys, table_id): filter_snippets.filter_limit_col_range(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_value_range(capsys, snapshot, table_id): +def test_filter_limit_value_range(capsys, table_id): filter_snippets.filter_limit_value_range(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = 
capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_value_regex(capsys, snapshot, table_id): +def test_filter_limit_value_regex(capsys, table_id): filter_snippets.filter_limit_value_regex(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_timestamp_range(capsys, snapshot, table_id): +def test_filter_limit_timestamp_range(capsys, table_id): filter_snippets.filter_limit_timestamp_range(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_block_all(capsys, snapshot, table_id): +def test_filter_limit_block_all(capsys, table_id): filter_snippets.filter_limit_block_all(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_limit_pass_all(capsys, snapshot, table_id): +def test_filter_limit_pass_all(capsys, table_id): filter_snippets.filter_limit_pass_all(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_modify_strip_value(capsys, snapshot, table_id): +def test_filter_modify_strip_value(capsys, table_id): filter_snippets.filter_modify_strip_value(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_modify_apply_label(capsys, snapshot, table_id): +def test_filter_modify_apply_label(capsys, table_id): filter_snippets.filter_modify_apply_label(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_composing_chain(capsys, snapshot, table_id): +def test_filter_composing_chain(capsys, table_id): filter_snippets.filter_composing_chain(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_composing_interleave(capsys, snapshot, table_id): +def test_filter_composing_interleave(capsys, table_id): filter_snippets.filter_composing_interleave(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_filter_composing_condition(capsys, snapshot, table_id): +def test_filter_composing_condition(capsys, table_id): filter_snippets.filter_composing_condition(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected diff --git a/samples/snippets/filters/requirements.txt b/samples/snippets/filters/requirements.txt index ae10593d2..6dc985893 100644 --- a/samples/snippets/filters/requirements.txt +++ b/samples/snippets/filters/requirements.txt @@ -1,2 +1 @@ google-cloud-bigtable==2.22.0 -snapshottest==0.6.0 \ No newline at end of file diff 
--git a/samples/snippets/filters/snapshots/snap_filters_test.py b/samples/snippets/filters/snapshots/snap_filters_test.py index a0580f565..2331c93bc 100644 --- a/samples/snippets/filters/snapshots/snap_filters_test.py +++ b/samples/snippets/filters/snapshots/snap_filters_test.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- -# snapshottest: v1 - https://goo.gl/zC4yUc -# flake8: noqa +# this was previously implemented using the `snapshottest` package (https://goo.gl/zC4yUc), +# which is not compatible with Python 3.12. So we moved to a standard dictionary storing +# expected outputs for each test from __future__ import unicode_literals -from snapshottest import Snapshot -snapshots = Snapshot() +snapshots = {} -snapshots['test_filter_limit_row_regex 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_row_regex'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 @@ -27,7 +27,7 @@ ''' -snapshots['test_filter_limit_cells_per_col 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_cells_per_col'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 @@ -71,7 +71,7 @@ ''' -snapshots['test_filter_limit_cells_per_row 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_cells_per_row'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 @@ -102,7 +102,7 @@ ''' -snapshots['test_filter_limit_cells_per_row_offset 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_cells_per_row_offset'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_05gb: true @2019-05-01 00:00:00+00:00 Column Family stats_summary @@ -132,7 +132,7 @@ ''' -snapshots['test_filter_limit_col_family_regex 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_col_family_regex'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 @@ -164,7 +164,7 @@ ''' -snapshots['test_filter_limit_col_qualifier_regex 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_col_qualifier_regex'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 @@ -191,7 +191,7 @@ ''' -snapshots['test_filter_limit_col_range 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_col_range'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 @@ -207,7 +207,7 @@ ''' -snapshots['test_filter_limit_value_range 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_value_range'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00 @@ -217,7 +217,7 @@ ''' -snapshots['test_filter_limit_value_regex 1'] = '''Reading data for 
phone#4c410523#20190501: +snapshots['test_filter_limit_value_regex'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00 @@ -239,15 +239,15 @@ ''' -snapshots['test_filter_limit_timestamp_range 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_timestamp_range'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 ''' -snapshots['test_filter_limit_block_all 1'] = '' +snapshots['test_filter_limit_block_all'] = '' -snapshots['test_filter_limit_pass_all 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_limit_pass_all'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 @@ -291,7 +291,7 @@ ''' -snapshots['test_filter_modify_strip_value 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_modify_strip_value'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: @2019-05-01 00:00:00+00:00 \tdata_plan_01gb: @2019-04-30 23:00:00+00:00 @@ -335,7 +335,7 @@ ''' -snapshots['test_filter_modify_apply_label 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_modify_apply_label'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 [labelled] \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 [labelled] @@ -379,7 +379,7 @@ ''' -snapshots['test_filter_composing_chain 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_composing_chain'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 \tdata_plan_05gb: true @2019-05-01 00:00:00+00:00 @@ -402,7 +402,7 @@ ''' -snapshots['test_filter_composing_interleave 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_composing_interleave'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 \tdata_plan_05gb: true @2019-05-01 00:00:00+00:00 @@ -435,7 +435,7 @@ ''' -snapshots['test_filter_composing_condition 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_filter_composing_condition'] = '''Reading data for phone#4c410523#20190501: Column Family cell_plan \tdata_plan_01gb: false @2019-05-01 00:00:00+00:00 [filtered-out] \tdata_plan_01gb: true @2019-04-30 23:00:00+00:00 [filtered-out] diff --git a/samples/snippets/reads/reads_test.py b/samples/snippets/reads/reads_test.py index 0b61e341f..da826d6fb 100644 --- a/samples/snippets/reads/reads_test.py +++ b/samples/snippets/reads/reads_test.py @@ -14,11 +14,13 @@ import datetime import os import uuid +import inspect from google.cloud import bigtable import pytest -import read_snippets +from .snapshots.snap_reads_test import snapshots +from . 
import read_snippets PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] @@ -72,50 +74,57 @@ def table_id(): table.delete() -def test_read_row(capsys, snapshot, table_id): +def test_read_row(capsys, table_id): read_snippets.read_row(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_read_row_partial(capsys, snapshot, table_id): +def test_read_row_partial(capsys, table_id): read_snippets.read_row_partial(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_read_rows(capsys, snapshot, table_id): +def test_read_rows(capsys, table_id): read_snippets.read_rows(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_read_row_range(capsys, snapshot, table_id): +def test_read_row_range(capsys, table_id): read_snippets.read_row_range(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_read_row_ranges(capsys, snapshot, table_id): +def test_read_row_ranges(capsys, table_id): read_snippets.read_row_ranges(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_read_prefix(capsys, snapshot, table_id): +def test_read_prefix(capsys, table_id): read_snippets.read_prefix(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected -def test_read_filter(capsys, snapshot, table_id): +def test_read_filter(capsys, table_id): read_snippets.read_filter(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - snapshot.assert_match(out) + expected = snapshots[inspect.currentframe().f_code.co_name] + assert out == expected diff --git a/samples/snippets/reads/requirements.txt b/samples/snippets/reads/requirements.txt index ae10593d2..6dc985893 100644 --- a/samples/snippets/reads/requirements.txt +++ b/samples/snippets/reads/requirements.txt @@ -1,2 +1 @@ google-cloud-bigtable==2.22.0 -snapshottest==0.6.0 \ No newline at end of file diff --git a/samples/snippets/reads/snapshots/snap_reads_test.py b/samples/snippets/reads/snapshots/snap_reads_test.py index f45e98f2e..564a4df7e 100644 --- a/samples/snippets/reads/snapshots/snap_reads_test.py +++ b/samples/snippets/reads/snapshots/snap_reads_test.py @@ -1,19 +1,18 @@ # -*- coding: utf-8 -*- -# snapshottest: v1 - https://goo.gl/zC4yUc +# this was previously implemented using the `snapshottest` package (https://goo.gl/zC4yUc), +# which is not compatible with Python 3.12. 
So we moved to a standard dictionary storing +# expected outputs for each test from __future__ import unicode_literals -from snapshottest import Snapshot +snapshots = {} - -snapshots = Snapshot() - -snapshots['test_read_row_partial 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_row_partial'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00 ''' -snapshots['test_read_rows 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_rows'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 @@ -27,7 +26,7 @@ ''' -snapshots['test_read_row_range 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_row_range'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 @@ -47,7 +46,7 @@ ''' -snapshots['test_read_row_ranges 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_row_ranges'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 @@ -79,7 +78,7 @@ ''' -snapshots['test_read_prefix 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_prefix'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 @@ -111,7 +110,7 @@ ''' -snapshots['test_read_filter 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_filter'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tos_build: PQ2A.190405.003 @2019-05-01 00:00:00+00:00 @@ -133,7 +132,7 @@ ''' -snapshots['test_read_row 1'] = '''Reading data for phone#4c410523#20190501: +snapshots['test_read_row'] = '''Reading data for phone#4c410523#20190501: Column Family stats_summary \tconnected_cell: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 \tconnected_wifi: \x00\x00\x00\x00\x00\x00\x00\x01 @2019-05-01 00:00:00+00:00 From 0b076d54a99efdec2b0a8b0201cf8096b53b208e Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 18 Mar 2024 11:30:27 -0700 Subject: [PATCH 21/24] fixed lint --- samples/snippets/deletes/deletes_snippets.py | 4 +--- samples/snippets/deletes/noxfile.py | 15 ++++++++------- samples/snippets/filters/filters_test.py | 1 + samples/snippets/filters/noxfile.py | 15 ++++++++------- samples/snippets/reads/noxfile.py | 15 ++++++++------- 5 files changed, 26 insertions(+), 24 deletions(-) diff --git a/samples/snippets/deletes/deletes_snippets.py b/samples/snippets/deletes/deletes_snippets.py index 8e78083bf..72f812ca2 100644 --- a/samples/snippets/deletes/deletes_snippets.py +++ b/samples/snippets/deletes/deletes_snippets.py @@ -37,9 +37,7 @@ def delete_from_column_family(project_id, instance_id, table_id): instance = client.instance(instance_id) table = instance.table(table_id) row = table.row("phone#4c410523#20190501") - row.delete_cells( - column_family_id="cell_plan", columns=row.ALL_COLUMNS - ) + 
row.delete_cells(column_family_id="cell_plan", columns=row.ALL_COLUMNS) row.commit() diff --git a/samples/snippets/deletes/noxfile.py b/samples/snippets/deletes/noxfile.py index 483b55901..3b7135946 100644 --- a/samples/snippets/deletes/noxfile.py +++ b/samples/snippets/deletes/noxfile.py @@ -160,6 +160,7 @@ def blacken(session: nox.sessions.Session) -> None: # format = isort + black # + @nox.session def format(session: nox.sessions.Session) -> None: """ @@ -187,7 +188,9 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: @@ -209,9 +212,7 @@ def _session_tests( if os.path.exists("requirements-test.txt"): if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") else: session.install("-r", "requirements-test.txt") with open("requirements-test.txt") as rtfile: @@ -224,9 +225,9 @@ def _session_tests( post_install(session) if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) + concurrent_args.extend(["-n", "auto"]) session.run( "pytest", @@ -256,7 +257,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): diff --git a/samples/snippets/filters/filters_test.py b/samples/snippets/filters/filters_test.py index 7846669c5..aedd8f08d 100644 --- a/samples/snippets/filters/filters_test.py +++ b/samples/snippets/filters/filters_test.py @@ -113,6 +113,7 @@ def test_filter_limit_row_regex(capsys, table_id): expected = snapshots[inspect.currentframe().f_code.co_name] assert out == expected + def test_filter_limit_cells_per_col(capsys, table_id): filter_snippets.filter_limit_cells_per_col(PROJECT, BIGTABLE_INSTANCE, table_id) diff --git a/samples/snippets/filters/noxfile.py b/samples/snippets/filters/noxfile.py index 483b55901..3b7135946 100644 --- a/samples/snippets/filters/noxfile.py +++ b/samples/snippets/filters/noxfile.py @@ -160,6 +160,7 @@ def blacken(session: nox.sessions.Session) -> None: # format = isort + black # + @nox.session def format(session: nox.sessions.Session) -> None: """ @@ -187,7 +188,9 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: @@ -209,9 +212,7 @@ def _session_tests( if os.path.exists("requirements-test.txt"): if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") else: session.install("-r", "requirements-test.txt") with open("requirements-test.txt") as rtfile: @@ -224,9 +225,9 @@ def _session_tests( post_install(session) if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) + concurrent_args.extend(["-n", "auto"]) session.run( "pytest", @@ -256,7 +257,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): diff --git a/samples/snippets/reads/noxfile.py b/samples/snippets/reads/noxfile.py index 483b55901..3b7135946 100644 --- a/samples/snippets/reads/noxfile.py +++ b/samples/snippets/reads/noxfile.py @@ -160,6 +160,7 @@ def blacken(session: nox.sessions.Session) -> None: # format = isort + black # + @nox.session def format(session: nox.sessions.Session) -> None: """ @@ -187,7 +188,9 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: @@ -209,9 +212,7 @@ def _session_tests( if os.path.exists("requirements-test.txt"): if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") else: session.install("-r", "requirements-test.txt") with open("requirements-test.txt") as rtfile: @@ -224,9 +225,9 @@ def _session_tests( post_install(session) if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) + concurrent_args.extend(["-n", "auto"]) session.run( "pytest", @@ -256,7 +257,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): From fa19b373ae9a6f89d7d85d37456b52c74b8adf9d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 18 Mar 2024 11:41:42 -0700 Subject: [PATCH 22/24] disable beam 3.12 test --- samples/beam/noxfile_config.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/samples/beam/noxfile_config.py b/samples/beam/noxfile_config.py index eb01435a0..e81c8122c 100644 --- a/samples/beam/noxfile_config.py +++ b/samples/beam/noxfile_config.py @@ -23,8 +23,7 @@ TEST_CONFIG_OVERRIDE = { # You can opt out from the test for specific Python versions. 
"ignored_versions": [ - "2.7", # not supported - "3.10", # Beam wheels not yet released for Python 3.10 + "3.12", # Beam not yet supported for Python 3.12 ], # Old samples are opted out of enforcing Python type hints # All new samples should feature them From b8d94e4b72eab51f869574ce6e360866198579a1 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 18 Mar 2024 12:59:38 -0700 Subject: [PATCH 23/24] added missing __init__.pys --- samples/snippets/filters/__init__.py | 0 samples/snippets/reads/__init__.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 samples/snippets/filters/__init__.py create mode 100644 samples/snippets/reads/__init__.py diff --git a/samples/snippets/filters/__init__.py b/samples/snippets/filters/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/samples/snippets/reads/__init__.py b/samples/snippets/reads/__init__.py new file mode 100644 index 000000000..e69de29bb From 6b5704f41a4120c931675a200f36589f955d5feb Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 18 Mar 2024 13:17:26 -0700 Subject: [PATCH 24/24] removed beam 3.7 test --- samples/beam/noxfile_config.py | 1 + samples/beam/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/samples/beam/noxfile_config.py b/samples/beam/noxfile_config.py index e81c8122c..66d7bc5ac 100644 --- a/samples/beam/noxfile_config.py +++ b/samples/beam/noxfile_config.py @@ -23,6 +23,7 @@ TEST_CONFIG_OVERRIDE = { # You can opt out from the test for specific Python versions. "ignored_versions": [ + "3.7", # Beam no longer supports Python 3.7 for new releases "3.12", # Beam not yet supported for Python 3.12 ], # Old samples are opted out of enforcing Python type hints diff --git a/samples/beam/requirements.txt b/samples/beam/requirements.txt index 813fc8d2b..86e305c22 100644 --- a/samples/beam/requirements.txt +++ b/samples/beam/requirements.txt @@ -1,3 +1,3 @@ -apache-beam==2.52.0 +apache-beam==2.54.0 google-cloud-bigtable==2.22.0 google-cloud-core==2.4.1