Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We'll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: revise blob.compose query parameters if_generation_match #454

Merged
merged 16 commits into from Jun 14, 2021
98 changes: 58 additions & 40 deletions google/cloud/storage/blob.py
Expand Up @@ -2934,35 +2934,35 @@ def compose(

:type if_generation_match: long
:param if_generation_match:
(Optional) Make the operation conditional on whether the blob's
current generation matches the given value. Setting to 0 makes the
operation succeed only if there are no live versions of the blob.
(Optional) Makes the operation conditional on whether the
destination object's current generation matches the given value.
Setting to 0 makes the operation succeed only if there are no live
versions of the object.

If a list of long is passed in, makes the operation conditional on whether the
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I would recommend phrasing this something like:

"Note: In a previous version, this argument worked identically to the if_source_generation_match argument. For backwards-compatibility reasons, if a list is passed in, this argument will behave like if_source_generation_match and also issue a DeprecationWarning."

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Done. I like this phrasing a lot, thanks!

current generation of each source blob matches the corresponding generation.
The list must match ``sources`` item-to-item.
(Deprecated: type(list of long) is supported for backwards-compatability reasons only.
Please use if_source_generation_match instead to match source objects generations.)
tseaver marked this conversation as resolved.
Show resolved Hide resolved

:type if_metageneration_match: list of long
:type if_metageneration_match: long
:param if_metageneration_match:
(Optional) Make the operation conditional on whether the blob's
current metageneration matches the given value. The list must match
``sources`` item-to-item.
(Optional) Makes the operation conditional on whether the
destination object's current metageneration matches the given
value.

If a list of long is passed in, no match operation will be performed.
cojenco marked this conversation as resolved.
Show resolved Hide resolved
(Deprecated: type(list of long) is supported for backwards-compatability reasons only.)

:type if_source_generation_match: list of long
:param if_source_generation_match:
(Optional) Make the operation conditional on whether the current generation
(Optional) Makes the operation conditional on whether the current generation
of each source blob matches the corresponding generation.
The list must match ``sources`` item-to-item.

:type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
:param retry: (Optional) How to retry the RPC. A None value will disable retries.
A google.api_core.retry.Retry value will enable retries, and the object will
define retriable response codes and errors and configure backoff and timeout options.

A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
activates it only if certain conditions are met. This class exists to provide safe defaults
for RPC calls that are not technically safe to retry normally (due to potential data
duplication or other side-effects) but become safe to retry if a condition such as
if_generation_match is set.

See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
information on retry types and how to configure them.
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`

Example:
Compose blobs using source generation match preconditions.
Expand All @@ -2979,18 +2979,36 @@ def compose(
>>> composed_blob.compose(blobs, if_source_generation_match=if_source_generation_match)
"""
sources_len = len(sources)
tseaver marked this conversation as resolved.
Show resolved Hide resolved

client = self._require_client(client)
query_params = {}

if self.user_project is not None:
query_params["userProject"] = self.user_project
if isinstance(if_generation_match, list):
warnings.warn(
"if_generation_match: type list is deprecated and supported for backwards-compatability reasons only."
cojenco marked this conversation as resolved.
Show resolved Hide resolved
"Use if_source_generation_match instead to match source objects generations.",
PendingDeprecationWarning,
cojenco marked this conversation as resolved.
Show resolved Hide resolved
stacklevel=1,
)

_add_generation_match_parameters(
query_params,
if_generation_match=if_generation_match,
if_metageneration_match=if_metageneration_match,
)
if if_source_generation_match is not None:
raise ValueError(
"Use if_generation_match to match the generation of the destination object."
cojenco marked this conversation as resolved.
Show resolved Hide resolved
"Use if_source_generation_match to match source objects generations."
)

# if_generation_match: type list is deprecated. Instead use if_source_generation_match.
if_source_generation_match = if_generation_match
if_generation_match = None

if isinstance(if_metageneration_match, list):
warnings.warn(
"if_metageneration_match matches the given value to the destination object's metageneration."
"Please pass in a single value (type long).",
PendingDeprecationWarning,
cojenco marked this conversation as resolved.
Show resolved Hide resolved
stacklevel=1,
)

if_metageneration_match = None

if if_source_generation_match is None:
if_source_generation_match = [None] * sources_len
Expand All @@ -3016,6 +3034,16 @@ def compose(
"sourceObjects": source_objects,
"destination": self._properties.copy(),
}

if self.user_project is not None:
query_params["userProject"] = self.user_project

_add_generation_match_parameters(
query_params,
if_generation_match=if_generation_match,
if_metageneration_match=if_metageneration_match,
)

api_response = client._post_resource(
"{}/compose".format(self.path),
request,
Expand Down Expand Up @@ -3114,18 +3142,8 @@ def rewrite(
object's current metageneration does not match the given value.

:type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
:param retry: (Optional) How to retry the RPC. A None value will disable retries.
A google.api_core.retry.Retry value will enable retries, and the object will
define retriable response codes and errors and configure backoff and timeout options.

A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
activates it only if certain conditions are met. This class exists to provide safe defaults
for RPC calls that are not technically safe to retry normally (due to potential data
duplication or other side-effects) but become safe to retry if a condition such as
if_generation_match is set.

See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
information on retry types and how to configure them.
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`

:rtype: tuple
:returns: ``(token, bytes_rewritten, total_bytes)``, where ``token``
Expand Down
36 changes: 32 additions & 4 deletions tests/system/test_system.py
Expand Up @@ -1723,7 +1723,7 @@ def test_compose_replace_existing_blob(self):
composed = original.download_as_bytes()
self.assertEqual(composed, BEFORE + TO_APPEND)

def test_compose_with_source_generation_match(self):
def test_compose_with_generation_match_list(self):
BEFORE = b"AAA\n"
original = self.bucket.blob("original")
original.content_type = "text/plain"
Expand All @@ -1736,17 +1736,22 @@ def test_compose_with_source_generation_match(self):
self.case_blobs_to_delete.append(to_append)

with self.assertRaises(google.api_core.exceptions.PreconditionFailed):
original.compose([original, to_append], if_source_generation_match=[6, 7])
original.compose(
[original, to_append],
if_generation_match=[6, 7],
if_metageneration_match=[8, 9],
)
cojenco marked this conversation as resolved.
Show resolved Hide resolved

original.compose(
[original, to_append],
if_source_generation_match=[original.generation, to_append.generation],
if_generation_match=[original.generation, to_append.generation],
if_metageneration_match=[original.metageneration, to_append.metageneration],
)

composed = original.download_as_bytes()
self.assertEqual(composed, BEFORE + TO_APPEND)
cojenco marked this conversation as resolved.
Show resolved Hide resolved

def test_compose_with_generation_match(self):
def test_compose_with_generation_match_long(self):
BEFORE = b"AAA\n"
original = self.bucket.blob("original")
original.content_type = "text/plain"
Expand All @@ -1766,6 +1771,29 @@ def test_compose_with_generation_match(self):
composed = original.download_as_bytes()
self.assertEqual(composed, BEFORE + TO_APPEND)

def test_compose_with_source_generation_match(self):
    """Compose succeeds only when every ``if_source_generation_match`` entry
    matches the live generation of the corresponding source blob."""
    initial_payload = b"AAA\n"
    destination = self.bucket.blob("original")
    destination.content_type = "text/plain"
    destination.upload_from_string(initial_payload)
    self.case_blobs_to_delete.append(destination)

    appended_payload = b"BBB\n"
    extra = self.bucket.blob("to_append")
    extra.upload_from_string(appended_payload)
    self.case_blobs_to_delete.append(extra)

    # Deliberately wrong generations: the backend must reject the compose.
    with self.assertRaises(google.api_core.exceptions.PreconditionFailed):
        destination.compose([destination, extra], if_source_generation_match=[6, 7])

    # Correct per-source generations: the compose goes through.
    destination.compose(
        [destination, extra],
        if_source_generation_match=[destination.generation, extra.generation],
    )

    self.assertEqual(
        destination.download_as_bytes(), initial_payload + appended_payload
    )

@unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.")
def test_compose_with_user_project(self):
new_bucket_name = "compose-user-project" + unique_resource_id("-")
Expand Down
119 changes: 119 additions & 0 deletions tests/unit/test_blob.py
Expand Up @@ -3734,6 +3734,125 @@ def test_compose_w_generation_match(self):
_target_object=destination,
)

@mock.patch("warnings.warn")
def test_compose_w_generation_match_w_warning(self, mock_warn):
    """A list passed as ``if_generation_match`` is reinterpreted as
    per-source generation preconditions and triggers a deprecation warning."""
    generations = [6, 9]
    client = mock.Mock(spec=["_post_resource"])
    client._post_resource.return_value = {}
    bucket = _Bucket(client=client)

    first = self._make_one("source-1", bucket=bucket)
    second = self._make_one("source-2", bucket=bucket)
    destination = self._make_one("destination", bucket=bucket)

    destination.compose(
        sources=[first, second], if_generation_match=generations,
    )

    # Each source object carries its own ifGenerationMatch precondition;
    # nothing is sent as a destination-level query parameter.
    source_objects = [
        {
            "name": name,
            "generation": None,
            "objectPreconditions": {"ifGenerationMatch": generation},
        }
        for name, generation in zip(["source-1", "source-2"], generations)
    ]
    client._post_resource.assert_called_once_with(
        "/b/name/o/destination/compose",
        {"sourceObjects": source_objects, "destination": {}},
        query_params={},
        timeout=self._get_default_timeout(),
        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
        _target_object=destination,
    )

    mock_warn.assert_called_with(
        "if_generation_match: type list is deprecated and supported for backwards-compatability reasons only."
        "Use if_source_generation_match instead to match source objects generations.",
        PendingDeprecationWarning,
        stacklevel=1,
    )

def test_compose_invalid_generation_match(self):
    """Supplying both ``if_generation_match`` and ``if_source_generation_match``
    is ambiguous: ``compose`` must raise ``ValueError`` before any API call."""
    precondition_list = [6, 8]
    client = mock.Mock(spec=["_post_resource"])
    bucket = _Bucket(client=client)

    blob_one = self._make_one("source-1", bucket=bucket)
    blob_two = self._make_one("source-2", bucket=bucket)
    destination = self._make_one("destination", bucket=bucket)

    with self.assertRaises(ValueError):
        destination.compose(
            sources=[blob_one, blob_two],
            if_generation_match=precondition_list,
            if_source_generation_match=precondition_list,
        )

    # The request must be rejected client-side; no RPC is issued.
    client._post_resource.assert_not_called()

@mock.patch("warnings.warn")
def test_compose_w_metageneration_match_w_warning(self, mock_warn):
    """A list passed as ``if_metageneration_match`` is dropped (no
    precondition is sent) and a deprecation warning is issued instead."""
    client = mock.Mock(spec=["_post_resource"])
    bucket = _Bucket(client=client)

    blob_one = self._make_one("source-1", bucket=bucket)
    blob_two = self._make_one("source-2", bucket=bucket)
    destination = self._make_one("destination", bucket=bucket)

    destination.compose(
        sources=[blob_one, blob_two], if_metageneration_match=[6],
    )

    # No objectPreconditions and no query parameters: the list value
    # was discarded rather than forwarded.
    expected_payload = {
        "sourceObjects": [
            {"name": name, "generation": None}
            for name in ("source-1", "source-2")
        ],
        "destination": {},
    }
    client._post_resource.assert_called_once_with(
        "/b/name/o/destination/compose",
        expected_payload,
        query_params={},
        timeout=self._get_default_timeout(),
        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
        _target_object=destination,
    )

    mock_warn.assert_called_with(
        "if_metageneration_match matches the given value to the destination object's metageneration."
        "Please pass in a single value (type long).",
        PendingDeprecationWarning,
        stacklevel=1,
    )

def test_compose_w_metageneration_match(self):
source_1_name = "source-1"
source_2_name = "source-2"
Expand Down