Revert "Replace StorageV1 client with GCS client (#28079)" (#28721)
This reverts commit 94812c0.
damccorm authored Sep 28, 2023 · 1 parent e6dcd37 · commit 56b1259
Showing 24 changed files with 5,752 additions and 610 deletions.
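For orientation, a minimal sketch of the two client styles this revert swaps between. Both snippets are illustrative assumptions based on the public APIs of the apitools-generated client bundled with Beam and of google-cloud-storage; neither is code from this commit.

# Style restored by this revert: the apitools-generated StorageV1 client.
from apache_beam.io.gcp.internal.clients import storage

client = storage.StorageV1()  # falls back to default credential discovery
request = storage.StorageObjectsGetRequest(
    bucket='my-bucket', object='path/to/object')
metadata = client.objects.Get(request)  # returns an Object message

# Style removed by this revert: the google-cloud-storage client.
from google.cloud import storage as gcs

gcs_client = gcs.Client()
blob = gcs_client.bucket('my-bucket').get_blob('path/to/object')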
6 changes: 0 additions & 6 deletions CHANGES.md
@@ -53,12 +53,6 @@
* ([#X](https://github.com/apache/beam/issues/X)).
-->

# [2.52.0] - Unreleased

## I/Os

* Python GCSIO is now implemented with GCP GCS Client instead of apitools ([#25676](https://github.com/apache/beam/issues/25676))

# [2.51.0] - Unreleased

## Highlights
@@ -4790,6 +4790,10 @@ GBKTransform:
- from_runner_api_parameter
- to_runner_api_parameter
GcpTestIOError: {}
GcsDownloader:
methods:
- get_range
- size
GCSFileSystem:
methods:
- checksum
@@ -4833,6 +4837,10 @@ GcsIOError: {}
GcsIOOverrides:
methods:
- retry_func
GcsUploader:
methods:
- finish
- put
GeneralPurposeConsumerSet:
methods:
- flush
@@ -177,7 +177,6 @@ def format_user_score_sums(user_score):
(user, score) = user_score
return 'user: %s, total_score: %s' % (user, score)


( # pylint: disable=expression-not-assigned
p
| 'ReadInputText' >> beam.io.ReadFromText(args.input)
7 changes: 2 additions & 5 deletions sdks/python/apache_beam/internal/gcp/auth.py
@@ -112,9 +112,6 @@ def __getattr__(self, attr):
"""Delegate attribute access to underlying google-auth credentials."""
return getattr(self._google_auth_credentials, attr)

def get_google_auth_credentials(self):
return self._google_auth_credentials


class _Credentials(object):
_credentials_lock = threading.Lock()
@@ -123,7 +120,7 @@ class _Credentials(object):

@classmethod
def get_service_credentials(cls, pipeline_options):
# type: (PipelineOptions) -> Optional[_ApitoolsCredentialsAdapter]
# type: (PipelineOptions) -> Optional[google.auth.credentials.Credentials]
with cls._credentials_lock:
if cls._credentials_init:
return cls._credentials
@@ -143,7 +140,7 @@ def get_service_credentials(cls, pipeline_options):

@staticmethod
def _get_service_credentials(pipeline_options):
# type: (PipelineOptions) -> Optional[_ApitoolsCredentialsAdapter]
# type: (PipelineOptions) -> Optional[google.auth.credentials.Credentials]
if not _GOOGLE_AUTH_AVAILABLE:
_LOGGER.warning(
'Unable to find default credentials because the google-auth library '
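The first hunk above drops the get_google_auth_credentials accessor that the GCS-client work had added. The adapter's delegation pattern, visible in the surrounding context, looks roughly like this (the class name below is a hypothetical stand-in for _ApitoolsCredentialsAdapter):

class CredentialsAdapter(object):
  """Wraps google-auth credentials for use by apitools-based code."""
  def __init__(self, google_auth_credentials):
    self._google_auth_credentials = google_auth_credentials

  def __getattr__(self, attr):
    # Anything not defined on the adapter itself is delegated to the
    # underlying google-auth credentials object.
    return getattr(self._google_auth_credentials, attr)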
4 changes: 1 addition & 3 deletions sdks/python/apache_beam/io/gcp/bigquery_test.py
@@ -826,7 +826,6 @@ def test_streaming_inserts_flush_on_byte_size_limit(self, mock_insert):
exception_type=exceptions.ServiceUnavailable if exceptions else None,
error_message='backendError')
])
@unittest.skip('Not compatible with new GCS client. See GH issue #26334.')
def test_load_job_exception(self, exception_type, error_message):

with mock.patch.object(bigquery_v2_client.BigqueryV2.JobsService,
@@ -866,7 +865,6 @@ def test_load_job_exception(self, exception_type, error_message):
exception_type=exceptions.InternalServerError if exceptions else None,
error_message='internalError'),
])
@unittest.skip('Not compatible with new GCS client. See GH issue #26334.')
def test_copy_load_job_exception(self, exception_type, error_message):

from apache_beam.io.gcp import bigquery_file_loads
@@ -885,7 +883,7 @@ def test_copy_load_job_exception(self, exception_type, error_message):
mock.patch.object(BigQueryWrapper,
'wait_for_bq_job'), \
mock.patch('apache_beam.io.gcp.internal.clients'
'.storage.storage_v1_client.StorageV1.ObjectsService'),\
'.storage.storage_v1_client.StorageV1.ObjectsService'), \
mock.patch('time.sleep'), \
self.assertRaises(Exception) as exc, \
beam.Pipeline() as p:
4 changes: 0 additions & 4 deletions sdks/python/apache_beam/io/gcp/bigquery_tools.py
@@ -339,9 +339,6 @@ class BigQueryWrapper(object):
offer a common place where retry logic for failures can be controlled.
In addition, it offers various functions used both in sources and sinks
(e.g., find and create tables, query a table, etc.).
Note that client parameter in constructor is only for testing purposes and
should not be used in production code.
"""

# If updating following names, also update the corresponding pydocs in
@@ -356,7 +353,6 @@ def __init__(self, client=None, temp_dataset_id=None, temp_table_ref=None):
self.gcp_bq_client = client or gcp_bigquery.Client(
client_info=ClientInfo(
user_agent="apache-beam-%s" % apache_beam.__version__))

self._unique_row_id = 0
# For testing scenarios where we pass in a client we do not want a
# randomized prefix for row IDs.
5 changes: 4 additions & 1 deletion sdks/python/apache_beam/io/gcp/bigquery_tools_test.py
@@ -60,6 +60,7 @@
from apitools.base.py.exceptions import HttpError, HttpForbiddenError
from google.api_core.exceptions import ClientError, DeadlineExceeded
from google.api_core.exceptions import InternalServerError
import google.cloud
except ImportError:
ClientError = None
DeadlineExceeded = None
@@ -223,9 +224,11 @@ def test_delete_dataset_retries_for_timeouts(self, patched_time_sleep):
wrapper._delete_dataset('', '')
self.assertTrue(client.datasets.Delete.called)

@unittest.skipIf(
google and not hasattr(google.cloud, '_http'), # pylint: disable=c-extension-no-member
'Dependencies not installed')
@mock.patch('time.sleep', return_value=None)
@mock.patch('google.cloud._http.JSONConnection.http')
@unittest.skip('Fails on import')
def test_user_agent_insert_all(self, http_mock, patched_sleep):
wrapper = beam.io.gcp.bigquery_tools.BigQueryWrapper()
try:
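The skipIf guard restored above only runs the user-agent test when google.cloud exposes its private _http transport module. A self-contained sketch of that optional-dependency guard pattern (the test class and method here are hypothetical):

import unittest

try:
  import google.cloud  # binds the top-level 'google' package as well
except ImportError:
  google = None

@unittest.skipIf(
    google is None or not hasattr(google.cloud, '_http'),
    'Dependencies not installed')
class OptionalDepTest(unittest.TestCase):
  def test_noop(self):
    # Only reached when google.cloud._http is importable.
    self.assertTrue(True)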
37 changes: 22 additions & 15 deletions sdks/python/apache_beam/io/gcp/gcsfilesystem.py
@@ -257,18 +257,17 @@ def rename(self, source_file_names, destination_file_names):
exceptions = {}
for batch in gcs_batches:
copy_statuses = self._gcsIO().copy_batch(batch)
copy_succeeded = {}
delete_targets = []
copy_succeeded = []
for src, dest, exception in copy_statuses:
if exception:
exceptions[(src, dest)] = exception
else:
copy_succeeded[src] = dest
delete_targets.append(src)
delete_statuses = self._gcsIO().delete_batch(delete_targets)
for src, exception in delete_statuses:
copy_succeeded.append((src, dest))
delete_batch = [src for src, dest in copy_succeeded]
delete_statuses = self._gcsIO().delete_batch(delete_batch)
for i, (src, exception) in enumerate(delete_statuses):
dest = copy_succeeded[i][1]
if exception:
dest = copy_succeeded[src]
exceptions[(src, dest)] = exception

if exceptions:
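The restored rename logic above follows a copy-then-delete strategy: only sources whose copy succeeded are deleted, and every failure is keyed by its (source, destination) pair. A condensed sketch of that flow, assuming a gcs object with Beam-style copy_batch/delete_batch methods:

def rename_batch(gcs, pairs):
  # pairs is a list of (src, dest) tuples; returns {(src, dest): exception}.
  exceptions = {}
  copy_succeeded = []
  for src, dest, exception in gcs.copy_batch(pairs):
    if exception:
      exceptions[(src, dest)] = exception
    else:
      copy_succeeded.append((src, dest))
  delete_statuses = gcs.delete_batch([src for src, _ in copy_succeeded])
  for i, (src, exception) in enumerate(delete_statuses):
    if exception:
      # Recover the destination for this source from the copy phase.
      exceptions[(src, copy_succeeded[i][1])] = exception
  return exceptions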
@@ -341,7 +340,8 @@ def metadata(self, path):
"""
try:
file_metadata = self._gcsIO()._status(path)
return FileMetadata(path, file_metadata['size'], file_metadata['updated'])
return FileMetadata(
path, file_metadata['size'], file_metadata['last_updated'])
except Exception as e: # pylint: disable=broad-except
raise BeamIOError("Metadata operation failed", {path: e})

@@ -352,20 +352,27 @@ def delete(self, paths):
Args:
paths: list of paths that give the file objects to be deleted
"""

exceptions = {}

for path in paths:
def _delete_path(path):
"""Recursively delete the file or directory at the provided path.
"""
if path.endswith('/'):
path_to_use = path + '*'
else:
path_to_use = path
match_result = self.match([path_to_use])[0]
statuses = self._gcsIO().delete_batch(
[m.path for m in match_result.metadata_list])
for target, exception in statuses:
if exception:
exceptions[target] = exception
# pylint: disable=used-before-assignment
failures = [e for (_, e) in statuses if e is not None]
if failures:
raise failures[0]

exceptions = {}
for path in paths:
try:
_delete_path(path)
except Exception as e: # pylint: disable=broad-except
exceptions[path] = e

if exceptions:
raise BeamIOError("Delete operation failed", exceptions)
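The restored delete flow expands a trailing-slash path into a glob before matching, then batch-deletes everything that matched and surfaces the first failure. A free-standing sketch under the same assumptions (fs offers match(), gcs offers delete_batch()):

def delete_path(fs, gcs, path):
  # Directory paths ('.../') are expanded to all objects beneath them.
  path_to_use = path + '*' if path.endswith('/') else path
  match_result = fs.match([path_to_use])[0]
  statuses = gcs.delete_batch([m.path for m in match_result.metadata_list])
  failures = [e for (_, e) in statuses if e is not None]
  if failures:
    raise failures[0]  # the caller records it against the original path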
@@ -21,9 +21,9 @@
Instantiates a TestPipeline to get options such as GCP project name, but
doesn't actually start a Beam pipeline or test any specific runner.
Run the following in 'sdks/python' directory to run these tests manually:
scripts/run_integration_test.sh \
--test_opts apache_beam/io/gcp/gcsfilesystem_integration_test.py
To run these tests manually:
./gradlew :sdks:python:test-suites:dataflow:integrationTest \
-Dtests=apache_beam.io.gcp.gcsfilesystem_integration_test:GcsFileSystemIntegrationTest # pylint: disable=line-too-long
"""

# pytype: skip-file
@@ -49,6 +49,9 @@
class GcsFileSystemIntegrationTest(unittest.TestCase):

INPUT_FILE = 'gs://dataflow-samples/shakespeare/kinglear.txt'
# Larger than 1MB to test maxBytesRewrittenPerCall.
# Also needs to be in a different region than the dest to take effect.
INPUT_FILE_LARGE = 'gs://apache-beam-samples-us-east1/wikipedia_edits/wiki_data-000000000000.json' # pylint: disable=line-too-long

def setUp(self):
self.test_pipeline = TestPipeline(is_integration_test=True)
74 changes: 20 additions & 54 deletions sdks/python/apache_beam/io/gcp/gcsfilesystem_test.py
@@ -258,7 +258,7 @@ def test_rename(self, mock_gcsio):
])

@mock.patch('apache_beam.io.gcp.gcsfilesystem.gcsio')
def test_rename_delete_error(self, mock_gcsio):
def test_rename_error(self, mock_gcsio):
# Prepare mocks.
gcsio_mock = mock.MagicMock()
gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock
@@ -272,60 +272,25 @@ def test_rename_error(self, mock_gcsio):
'gs://bucket/to2',
'gs://bucket/to3',
]
exception = IOError('Failed')
gcsio_mock.delete_batch.side_effect = [[(f, exception) for f in sources]]
gcsio_mock.copy_batch.side_effect = [[
('gs://bucket/from1', 'gs://bucket/to1', None),
('gs://bucket/from2', 'gs://bucket/to2', None),
('gs://bucket/from3', 'gs://bucket/to3', None),
]]
gcsio_mock.delete_batch.side_effect = [[
('gs://bucket/from1', None),
('gs://bucket/from2', Exception("BadThings")),
('gs://bucket/from3', None),
]]

# Issue batch rename.
with self.assertRaisesRegex(BeamIOError, r'^Rename operation failed'):
self.fs.rename(sources, destinations)

gcsio_mock.copy_batch.assert_called_once_with([
('gs://bucket/from1', 'gs://bucket/to1'),
('gs://bucket/from2', 'gs://bucket/to2'),
('gs://bucket/from3', 'gs://bucket/to3'),
])
gcsio_mock.delete_batch.assert_called_once_with([
'gs://bucket/from1',
'gs://bucket/from2',
'gs://bucket/from3',
])

@mock.patch('apache_beam.io.gcp.gcsfilesystem.gcsio')
def test_rename_copy_error(self, mock_gcsio):
# Prepare mocks.
gcsio_mock = mock.MagicMock()
gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock
sources = [
'gs://bucket/from1',
'gs://bucket/from2',
'gs://bucket/from3',
]
destinations = [
'gs://bucket/to1',
'gs://bucket/to2',
'gs://bucket/to3',
]
gcsio_mock.copy_batch.side_effect = [[
('gs://bucket/from1', 'gs://bucket/to1', None),
('gs://bucket/from2', 'gs://bucket/to2', Exception("BadThings")),
('gs://bucket/from3', 'gs://bucket/to3', None),
]]
gcsio_mock.delete_batch.side_effect = [[
('gs://bucket/from1', None),
('gs://bucket/from3', None),
]]
expected_results = {
(s, d): exception
for s, d in zip(sources, destinations)
}

# Issue batch rename.
with self.assertRaisesRegex(BeamIOError, r'^Rename operation failed'):
with self.assertRaisesRegex(BeamIOError,
r'^Rename operation failed') as error:
self.fs.rename(sources, destinations)
self.assertEqual(error.exception.exception_details, expected_results)

gcsio_mock.copy_batch.assert_called_once_with([
('gs://bucket/from1', 'gs://bucket/to1'),
@@ -334,6 +299,7 @@ def test_rename_copy_error(self, mock_gcsio):
])
gcsio_mock.delete_batch.assert_called_once_with([
'gs://bucket/from1',
'gs://bucket/from2',
'gs://bucket/from3',
])

@@ -342,7 +308,7 @@ def test_delete(self, mock_gcsio):
# Prepare mocks.
gcsio_mock = mock.MagicMock()
gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock
gcsio_mock._status.return_value = {'size': 0, 'updated': 99999.0}
gcsio_mock._status.return_value = {'size': 0, 'last_updated': 99999.0}
files = [
'gs://bucket/from1',
'gs://bucket/from2',
Expand All @@ -358,21 +324,21 @@ def test_delete_error(self, mock_gcsio):
# Prepare mocks.
gcsio_mock = mock.MagicMock()
gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock

gcsio_mock._status.return_value = {'size': 0, 'updated': 99999.0}
exception = IOError('Failed')
gcsio_mock.delete_batch.side_effect = exception
gcsio_mock._status.return_value = {'size': 0, 'last_updated': 99999.0}
files = [
'gs://bucket/from1',
'gs://bucket/from2',
'gs://bucket/from3',
]
gcsio_mock.delete_batch.side_effect = [
[('gs://bucket/from1', None)],
[('gs://bucket/from2', Exception("BadThings"))],
[('gs://bucket/from3', None)],
]
expected_results = {f: exception for f in files}

# Issue batch delete.
with self.assertRaisesRegex(BeamIOError, r'^Delete operation failed'):
with self.assertRaisesRegex(BeamIOError,
r'^Delete operation failed') as error:
self.fs.delete(files)
self.assertEqual(error.exception.exception_details, expected_results)
gcsio_mock.delete_batch.assert_called()


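The tests above lean on unittest.mock's side_effect semantics: assigning a list makes each successive call return the next element, so per-path delete_batch calls each see their own status batch. A minimal standalone illustration:

from unittest import mock

gcs = mock.MagicMock()
gcs.delete_batch.side_effect = [
    [('gs://bucket/from1', None)],
    [('gs://bucket/from2', Exception('BadThings'))],
]
assert gcs.delete_batch(['gs://bucket/from1']) == [('gs://bucket/from1', None)]
status = gcs.delete_batch(['gs://bucket/from2'])[0]
assert status[1].args == ('BadThings', )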
(Remaining changed files not shown.)
