From b11aa5f00753b094580847bc62c154ae0e584dbc Mon Sep 17 00:00:00 2001 From: Gurov Ilya Date: Wed, 22 Apr 2020 20:32:02 +0300 Subject: [PATCH] feat(storage): add arguments for *GenerationMatch uploading options (#111) * feat(storage): add arguments for *GenerationMatch uploading options * add unit tests changes * change args names in unit tests * add unit tests for _do_multipart_upload() * add unit tests for _initiate_resumable_upload() * add args translation into unit tests urls * update args docs --- google/cloud/storage/blob.py | 322 +++++++++++++++++++++++++++++++++-- tests/system/test_system.py | 27 +++ tests/unit/test_blob.py | 207 ++++++++++++++++++++-- 3 files changed, 530 insertions(+), 26 deletions(-) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 7358d5794..4200c9dd4 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -185,10 +185,9 @@ def __init__( self.chunk_size = chunk_size # Check that setter accepts value. self._bucket = bucket self._acl = ObjectACL(self) - if encryption_key is not None and kms_key_name is not None: - raise ValueError( - "Pass at most one of 'encryption_key' " "and 'kms_key_name'" - ) + _raise_for_more_than_one_none( + encryption_key=encryption_key, kms_key_name=kms_key_name, + ) self._encryption_key = encryption_key @@ -974,7 +973,17 @@ def _get_upload_arguments(self, content_type): return headers, object_metadata, content_type def _do_multipart_upload( - self, client, stream, content_type, size, num_retries, predefined_acl + self, + client, + stream, + content_type, + size, + num_retries, + predefined_acl, + if_generation_match, + if_generation_not_match, + if_metageneration_match, + if_metageneration_not_match, ): """Perform a multipart upload. 
@@ -1007,6 +1016,27 @@ def _do_multipart_upload( :type predefined_acl: str :param predefined_acl: (Optional) Predefined access control list + :type if_generation_match: long + :param if_generation_match: (Optional) Make the operation conditional on whether + the blob's current generation matches the given value. + Setting to 0 makes the operation succeed only if there + are no live versions of the blob. + + :type if_generation_not_match: long + :param if_generation_not_match: (Optional) Make the operation conditional on whether + the blob's current generation does not match the given + value. If no live blob exists, the precondition fails. + Setting to 0 makes the operation succeed only if there + is a live version of the blob. + + :type if_metageneration_match: long + :param if_metageneration_match: (Optional) Make the operation conditional on whether the + blob's current metageneration matches the given value. + + :type if_metageneration_not_match: long + :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the + blob's current metageneration does not match the given value. + :rtype: :class:`~requests.Response` :returns: The "200 OK" response object returned after the multipart upload request. 
@@ -1037,6 +1067,20 @@ def _do_multipart_upload( if predefined_acl is not None: name_value_pairs.append(("predefinedAcl", predefined_acl)) + if if_generation_match is not None: + name_value_pairs.append(("ifGenerationMatch", if_generation_match)) + + if if_generation_not_match is not None: + name_value_pairs.append(("ifGenerationNotMatch", if_generation_not_match)) + + if if_metageneration_match is not None: + name_value_pairs.append(("ifMetagenerationMatch", if_metageneration_match)) + + if if_metageneration_not_match is not None: + name_value_pairs.append( + ("ifMetagenerationNotMatch", if_metageneration_not_match) + ) + upload_url = _add_query_parameters(base_url, name_value_pairs) upload = MultipartUpload(upload_url, headers=headers) @@ -1059,6 +1103,10 @@ def _initiate_resumable_upload( predefined_acl=None, extra_headers=None, chunk_size=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, ): """Initiate a resumable upload. @@ -1102,6 +1150,27 @@ def _initiate_resumable_upload( If not passed, will fall back to the chunk size on the current blob. + :type if_generation_match: long + :param if_generation_match: (Optional) Make the operation conditional on whether + the blob's current generation matches the given value. + Setting to 0 makes the operation succeed only if there + are no live versions of the blob. + + :type if_generation_not_match: long + :param if_generation_not_match: (Optional) Make the operation conditional on whether + the blob's current generation does not match the given + value. If no live blob exists, the precondition fails. + Setting to 0 makes the operation succeed only if there + is a live version of the blob. + + :type if_metageneration_match: long + :param if_metageneration_match: (Optional) Make the operation conditional on whether the + blob's current metageneration matches the given value. 
+ + :type if_metageneration_not_match: long + :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the + blob's current metageneration does not match the given value. + :rtype: tuple :returns: Pair of @@ -1133,6 +1202,20 @@ def _initiate_resumable_upload( if predefined_acl is not None: name_value_pairs.append(("predefinedAcl", predefined_acl)) + if if_generation_match is not None: + name_value_pairs.append(("ifGenerationMatch", if_generation_match)) + + if if_generation_not_match is not None: + name_value_pairs.append(("ifGenerationNotMatch", if_generation_not_match)) + + if if_metageneration_match is not None: + name_value_pairs.append(("ifMetagenerationMatch", if_metageneration_match)) + + if if_metageneration_not_match is not None: + name_value_pairs.append( + ("ifMetagenerationNotMatch", if_metageneration_not_match) + ) + upload_url = _add_query_parameters(base_url, name_value_pairs) upload = ResumableUpload(upload_url, chunk_size, headers=headers) @@ -1153,7 +1236,17 @@ def _initiate_resumable_upload( return upload, transport def _do_resumable_upload( - self, client, stream, content_type, size, num_retries, predefined_acl + self, + client, + stream, + content_type, + size, + num_retries, + predefined_acl, + if_generation_match, + if_generation_not_match, + if_metageneration_match, + if_metageneration_not_match, ): """Perform a resumable upload. @@ -1188,6 +1281,27 @@ def _do_resumable_upload( :type predefined_acl: str :param predefined_acl: (Optional) Predefined access control list + :type if_generation_match: long + :param if_generation_match: (Optional) Make the operation conditional on whether + the blob's current generation matches the given value. + Setting to 0 makes the operation succeed only if there + are no live versions of the blob. 
+ + :type if_generation_not_match: long + :param if_generation_not_match: (Optional) Make the operation conditional on whether + the blob's current generation does not match the given + value. If no live blob exists, the precondition fails. + Setting to 0 makes the operation succeed only if there + is a live version of the blob. + + :type if_metageneration_match: long + :param if_metageneration_match: (Optional) Make the operation conditional on whether the + blob's current metageneration matches the given value. + + :type if_metageneration_not_match: long + :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the + blob's current metageneration does not match the given value. + :rtype: :class:`~requests.Response` :returns: The "200 OK" response object returned after the final chunk is uploaded. @@ -1199,6 +1313,10 @@ def _do_resumable_upload( size, num_retries, predefined_acl=predefined_acl, + if_generation_match=if_generation_match, + if_generation_not_match=if_generation_not_match, + if_metageneration_match=if_metageneration_match, + if_metageneration_not_match=if_metageneration_not_match, ) while not upload.finished: @@ -1207,7 +1325,17 @@ def _do_resumable_upload( return response def _do_upload( - self, client, stream, content_type, size, num_retries, predefined_acl + self, + client, + stream, + content_type, + size, + num_retries, + predefined_acl, + if_generation_match, + if_generation_not_match, + if_metageneration_match, + if_metageneration_not_match, ): """Determine an upload strategy and then perform the upload. @@ -1244,6 +1372,27 @@ def _do_upload( :type predefined_acl: str :param predefined_acl: (Optional) Predefined access control list + :type if_generation_match: long + :param if_generation_match: (Optional) Make the operation conditional on whether + the blob's current generation matches the given value. + Setting to 0 makes the operation succeed only if there + are no live versions of the blob. 
+ + :type if_generation_not_match: long + :param if_generation_not_match: (Optional) Make the operation conditional on whether + the blob's current generation does not match the given + value. If no live blob exists, the precondition fails. + Setting to 0 makes the operation succeed only if there + is a live version of the blob. + + :type if_metageneration_match: long + :param if_metageneration_match: (Optional) Make the operation conditional on whether the + blob's current metageneration matches the given value. + + :type if_metageneration_not_match: long + :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the + blob's current metageneration does not match the given value. + :rtype: dict :returns: The parsed JSON from the "200 OK" response. This will be the **only** response in the multipart case and it will be the @@ -1251,11 +1400,29 @@ def _do_upload( """ if size is not None and size <= _MAX_MULTIPART_SIZE: response = self._do_multipart_upload( - client, stream, content_type, size, num_retries, predefined_acl + client, + stream, + content_type, + size, + num_retries, + predefined_acl, + if_generation_match, + if_generation_not_match, + if_metageneration_match, + if_metageneration_not_match, ) else: response = self._do_resumable_upload( - client, stream, content_type, size, num_retries, predefined_acl + client, + stream, + content_type, + size, + num_retries, + predefined_acl, + if_generation_match, + if_generation_not_match, + if_metageneration_match, + if_metageneration_not_match, ) return response.json() @@ -1269,6 +1436,10 @@ def upload_from_file( num_retries=None, client=None, predefined_acl=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, ): """Upload the contents of this blob from a file-like object. 
@@ -1330,6 +1501,27 @@ def upload_from_file( :type predefined_acl: str :param predefined_acl: (Optional) Predefined access control list + :type if_generation_match: long + :param if_generation_match: (Optional) Make the operation conditional on whether + the blob's current generation matches the given value. + Setting to 0 makes the operation succeed only if there + are no live versions of the blob. + + :type if_generation_not_match: long + :param if_generation_not_match: (Optional) Make the operation conditional on whether + the blob's current generation does not match the given + value. If no live blob exists, the precondition fails. + Setting to 0 makes the operation succeed only if there + is a live version of the blob. + + :type if_metageneration_match: long + :param if_metageneration_match: (Optional) Make the operation conditional on whether the + blob's current metageneration matches the given value. + + :type if_metageneration_not_match: long + :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the + blob's current metageneration does not match the given value. + :raises: :class:`~google.cloud.exceptions.GoogleCloudError` if the upload response returns an error status. 
@@ -1340,19 +1532,46 @@ def upload_from_file( if num_retries is not None: warnings.warn(_NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2) + _raise_for_more_than_one_none( + if_generation_match=if_generation_match, + if_generation_not_match=if_generation_not_match, + ) + + _raise_for_more_than_one_none( + if_metageneration_match=if_metageneration_match, + if_metageneration_not_match=if_metageneration_not_match, + ) + _maybe_rewind(file_obj, rewind=rewind) predefined_acl = ACL.validate_predefined(predefined_acl) try: created_json = self._do_upload( - client, file_obj, content_type, size, num_retries, predefined_acl + client, + file_obj, + content_type, + size, + num_retries, + predefined_acl, + if_generation_match, + if_generation_not_match, + if_metageneration_match, + if_metageneration_not_match, ) self._set_properties(created_json) except resumable_media.InvalidResponse as exc: _raise_from_invalid_response(exc) def upload_from_filename( - self, filename, content_type=None, client=None, predefined_acl=None + self, + filename, + content_type=None, + client=None, + predefined_acl=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, ): """Upload this blob's contents from the content of a named file. @@ -1390,6 +1609,27 @@ def upload_from_filename( :type predefined_acl: str :param predefined_acl: (Optional) Predefined access control list + + :type if_generation_match: long + :param if_generation_match: (Optional) Make the operation conditional on whether + the blob's current generation matches the given value. + Setting to 0 makes the operation succeed only if there + are no live versions of the blob. + + :type if_generation_not_match: long + :param if_generation_not_match: (Optional) Make the operation conditional on whether + the blob's current generation does not match the given + value. If no live blob exists, the precondition fails. 
+ Setting to 0 makes the operation succeed only if there + is a live version of the blob. + + :type if_metageneration_match: long + :param if_metageneration_match: (Optional) Make the operation conditional on whether the + blob's current metageneration matches the given value. + + :type if_metageneration_not_match: long + :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the + blob's current metageneration does not match the given value. """ content_type = self._get_content_type(content_type, filename=filename) @@ -1401,10 +1641,22 @@ def upload_from_filename( client=client, size=total_bytes, predefined_acl=predefined_acl, + if_generation_match=if_generation_match, + if_generation_not_match=if_generation_not_match, + if_metageneration_match=if_metageneration_match, + if_metageneration_not_match=if_metageneration_not_match, ) def upload_from_string( - self, data, content_type="text/plain", client=None, predefined_acl=None + self, + data, + content_type="text/plain", + client=None, + predefined_acl=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, ): """Upload contents of this blob from the provided string. @@ -1437,6 +1689,27 @@ def upload_from_string( :type predefined_acl: str :param predefined_acl: (Optional) Predefined access control list + + :type if_generation_match: long + :param if_generation_match: (Optional) Make the operation conditional on whether + the blob's current generation matches the given value. + Setting to 0 makes the operation succeed only if there + are no live versions of the blob. + + :type if_generation_not_match: long + :param if_generation_not_match: (Optional) Make the operation conditional on whether + the blob's current generation does not match the given + value. If no live blob exists, the precondition fails. + Setting to 0 makes the operation succeed only if there + is a live version of the blob. 
+ + :type if_metageneration_match: long + :param if_metageneration_match: (Optional) Make the operation conditional on whether the + blob's current metageneration matches the given value. + + :type if_metageneration_not_match: long + :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the + blob's current metageneration does not match the given value. """ data = _to_bytes(data, encoding="utf-8") string_buffer = BytesIO(data) @@ -1446,6 +1719,10 @@ def upload_from_string( content_type=content_type, client=client, predefined_acl=predefined_acl, + if_generation_match=if_generation_match, + if_generation_not_match=if_generation_not_match, + if_metageneration_match=if_metageneration_match, + if_metageneration_not_match=if_metageneration_not_match, ) def create_resumable_upload_session( @@ -2354,3 +2631,24 @@ def _add_query_parameters(base_url, name_value_pairs): query = parse_qsl(query) query.extend(name_value_pairs) return urlunsplit((scheme, netloc, path, urlencode(query), frag)) + + +def _raise_for_more_than_one_none(**kwargs): + """Raise ``ValueError`` exception if more than one parameter was set. 
+ + :type error: :exc:`ValueError` + :param error: Description of which fields were set + + :raises: :class:`~ValueError` containing the fields that were set + """ + if sum(arg is not None for arg in kwargs.values()) > 1: + escaped_keys = ["'%s'" % name for name in kwargs.keys()] + + keys_but_last = ", ".join(escaped_keys[:-1]) + last_key = escaped_keys[-1] + + msg = "Pass at most one of {keys_but_last} and {last_key}".format( + keys_but_last=keys_but_last, last_key=last_key + ) + + raise ValueError(msg) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index ef4bc051b..675758794 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -33,6 +33,7 @@ from google.cloud.storage.bucket import LifecycleRuleDelete from google.cloud.storage.bucket import LifecycleRuleSetStorageClass from google.cloud import kms +import google.api_core import google.oauth2 from test_utils.retry import RetryErrors from test_utils.system import unique_resource_id @@ -710,6 +711,32 @@ def test_upload_gzip_encoded_download_raw(self): raw = blob.download_as_string(raw_download=True) self.assertEqual(raw, zipped) + def test_resumable_upload_with_generation_match(self): + blob = self.bucket.blob("LargeFile") + + # uploading the file + file_data = self.FILES["big"] + with open(file_data["path"], "rb") as file_obj: + blob.upload_from_file(file_obj) + self.case_blobs_to_delete.append(blob) + + # reuploading with correct generations numbers + with open(file_data["path"], "rb") as file_obj: + blob.upload_from_file( + file_obj, + if_generation_match=blob.generation, + if_metageneration_match=blob.metageneration, + ) + + # reuploading with generations numbers that doesn't match original + with self.assertRaises(google.api_core.exceptions.PreconditionFailed): + with open(file_data["path"], "rb") as file_obj: + blob.upload_from_file(file_obj, if_generation_match=3) + + with self.assertRaises(google.api_core.exceptions.PreconditionFailed): + with 
open(file_data["path"], "rb") as file_obj: + blob.upload_from_file(file_obj, if_metageneration_match=3) + class TestUnicode(unittest.TestCase): @vpcsc_config.skip_if_inside_vpcsc diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 91af38908..4ae0e21b3 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -1303,6 +1303,10 @@ def _do_multipart_success( num_retries=None, user_project=None, predefined_acl=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, kms_key_name=None, ): from six.moves.urllib.parse import urlencode @@ -1320,7 +1324,16 @@ def _do_multipart_success( stream = io.BytesIO(data) content_type = u"application/xml" response = blob._do_multipart_upload( - client, stream, content_type, size, num_retries, predefined_acl + client, + stream, + content_type, + size, + num_retries, + predefined_acl, + if_generation_match, + if_generation_not_match, + if_metageneration_match, + if_metageneration_not_match, ) # Check the mocks and the returned value. @@ -1349,6 +1362,18 @@ def _do_multipart_success( if kms_key_name is not None: qs_params.append(("kmsKeyName", kms_key_name)) + if if_generation_match is not None: + qs_params.append(("ifGenerationMatch", if_generation_match)) + + if if_generation_not_match is not None: + qs_params.append(("ifGenerationNotMatch", if_generation_not_match)) + + if if_metageneration_match is not None: + qs_params.append(("ifMetagenerationMatch", if_metageneration_match)) + + if if_metageneration_not_match is not None: + qs_params.append(("ifMetagenerationNotMatch", if_metageneration_not_match)) + upload_url += "?" 
+ urlencode(qs_params) payload = ( @@ -1392,6 +1417,18 @@ def test__do_multipart_upload_with_kms(self, mock_get_boundary): def test__do_multipart_upload_with_retry(self, mock_get_boundary): self._do_multipart_success(mock_get_boundary, num_retries=8) + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + def test__do_multipart_upload_with_generation_match(self, mock_get_boundary): + self._do_multipart_success( + mock_get_boundary, if_generation_match=4, if_metageneration_match=4 + ) + + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + def test__do_multipart_upload_with_generation_not_match(self, mock_get_boundary): + self._do_multipart_success( + mock_get_boundary, if_generation_not_match=4, if_metageneration_not_match=4 + ) + def test__do_multipart_upload_bad_size(self): blob = self._make_one(u"blob-name", bucket=None) @@ -1401,7 +1438,9 @@ def test__do_multipart_upload_bad_size(self): self.assertGreater(size, len(data)) with self.assertRaises(ValueError) as exc_info: - blob._do_multipart_upload(None, stream, None, size, None, None) + blob._do_multipart_upload( + None, stream, None, size, None, None, None, None, None, None + ) exc_contents = str(exc_info.exception) self.assertIn("was specified but the file-like object only had", exc_contents) @@ -1415,6 +1454,10 @@ def _initiate_resumable_helper( num_retries=None, user_project=None, predefined_acl=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, blob_chunk_size=786432, kms_key_name=None, ): @@ -1455,6 +1498,10 @@ def _initiate_resumable_helper( extra_headers=extra_headers, chunk_size=chunk_size, predefined_acl=predefined_acl, + if_generation_match=if_generation_match, + if_generation_not_match=if_generation_not_match, + if_metageneration_match=if_metageneration_match, + if_metageneration_not_match=if_metageneration_not_match, ) # Check the returned values. 
@@ -1474,6 +1521,18 @@ def _initiate_resumable_helper( if kms_key_name is not None: qs_params.append(("kmsKeyName", kms_key_name)) + if if_generation_match is not None: + qs_params.append(("ifGenerationMatch", if_generation_match)) + + if if_generation_not_match is not None: + qs_params.append(("ifGenerationNotMatch", if_generation_not_match)) + + if if_metageneration_match is not None: + qs_params.append(("ifMetagenerationMatch", if_metageneration_match)) + + if if_metageneration_not_match is not None: + qs_params.append(("ifMetagenerationNotMatch", if_metageneration_not_match)) + upload_url += "?" + urlencode(qs_params) self.assertEqual(upload.upload_url, upload_url) @@ -1558,6 +1617,16 @@ def test__initiate_resumable_upload_with_extra_headers(self): def test__initiate_resumable_upload_with_retry(self): self._initiate_resumable_helper(num_retries=11) + def test__initiate_resumable_upload_with_generation_match(self): + self._initiate_resumable_helper( + if_generation_match=4, if_metageneration_match=4 + ) + + def test__initiate_resumable_upload_with_generation_not_match(self): + self._initiate_resumable_helper( + if_generation_not_match=4, if_metageneration_not_match=4 + ) + def test__initiate_resumable_upload_with_predefined_acl(self): self._initiate_resumable_helper(predefined_acl="private") @@ -1580,7 +1649,16 @@ def _make_resumable_transport(self, headers1, headers2, headers3, total_bytes): return fake_transport, responses @staticmethod - def _do_resumable_upload_call0(blob, content_type, size=None, predefined_acl=None): + def _do_resumable_upload_call0( + blob, + content_type, + size=None, + predefined_acl=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + ): # First mock transport.request() does initiates upload. 
upload_url = ( "https://storage.googleapis.com/upload/storage/v1" @@ -1602,7 +1680,16 @@ def _do_resumable_upload_call0(blob, content_type, size=None, predefined_acl=Non @staticmethod def _do_resumable_upload_call1( - blob, content_type, data, resumable_url, size=None, predefined_acl=None + blob, + content_type, + data, + resumable_url, + size=None, + predefined_acl=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, ): # Second mock transport.request() does sends first chunk. if size is None: @@ -1625,7 +1712,16 @@ def _do_resumable_upload_call1( @staticmethod def _do_resumable_upload_call2( - blob, content_type, data, resumable_url, total_bytes, predefined_acl=None + blob, + content_type, + data, + resumable_url, + total_bytes, + predefined_acl=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, ): # Third mock transport.request() does sends last chunk. content_range = "bytes {:d}-{:d}/{:d}".format( @@ -1645,7 +1741,14 @@ def _do_resumable_upload_call2( ) def _do_resumable_helper( - self, use_size=False, num_retries=None, predefined_acl=None + self, + use_size=False, + num_retries=None, + predefined_acl=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, ): bucket = _Bucket(name="yesterday") blob = self._make_one(u"blob-name", bucket=bucket) @@ -1673,7 +1776,16 @@ def _do_resumable_helper( stream = io.BytesIO(data) content_type = u"text/html" response = blob._do_resumable_upload( - client, stream, content_type, size, num_retries, predefined_acl + client, + stream, + content_type, + size, + num_retries, + predefined_acl, + if_generation_match, + if_generation_not_match, + if_metageneration_match, + if_metageneration_not_match, ) # Check the returned values. 
@@ -1682,7 +1794,14 @@ def _do_resumable_helper( # Check the mocks. call0 = self._do_resumable_upload_call0( - blob, content_type, size=size, predefined_acl=predefined_acl + blob, + content_type, + size=size, + predefined_acl=predefined_acl, + if_generation_match=if_generation_match, + if_generation_not_match=if_generation_not_match, + if_metageneration_match=if_metageneration_match, + if_metageneration_not_match=if_metageneration_not_match, ) call1 = self._do_resumable_upload_call1( blob, @@ -1691,6 +1810,10 @@ def _do_resumable_helper( resumable_url, size=size, predefined_acl=predefined_acl, + if_generation_match=if_generation_match, + if_generation_not_match=if_generation_not_match, + if_metageneration_match=if_metageneration_match, + if_metageneration_not_match=if_metageneration_not_match, ) call2 = self._do_resumable_upload_call2( blob, @@ -1699,6 +1822,10 @@ def _do_resumable_helper( resumable_url, total_bytes, predefined_acl=predefined_acl, + if_generation_match=if_generation_match, + if_generation_not_match=if_generation_not_match, + if_metageneration_match=if_metageneration_match, + if_metageneration_not_match=if_metageneration_not_match, ) self.assertEqual(transport.request.mock_calls, [call0, call1, call2]) @@ -1715,7 +1842,15 @@ def test__do_resumable_upload_with_predefined_acl(self): self._do_resumable_helper(predefined_acl="private") def _do_upload_helper( - self, chunk_size=None, num_retries=None, predefined_acl=None, size=None + self, + chunk_size=None, + num_retries=None, + predefined_acl=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + size=None, ): from google.cloud.storage.blob import _MAX_MULTIPART_SIZE @@ -1741,19 +1876,46 @@ def _do_upload_helper( size = 12345654321 # Make the request and check the mocks. 
created_json = blob._do_upload( - client, stream, content_type, size, num_retries, predefined_acl + client, + stream, + content_type, + size, + num_retries, + predefined_acl, + if_generation_match, + if_generation_not_match, + if_metageneration_match, + if_metageneration_not_match, ) self.assertIs(created_json, mock.sentinel.json) response.json.assert_called_once_with() if size is not None and size <= _MAX_MULTIPART_SIZE: blob._do_multipart_upload.assert_called_once_with( - client, stream, content_type, size, num_retries, predefined_acl + client, + stream, + content_type, + size, + num_retries, + predefined_acl, + if_generation_match, + if_generation_not_match, + if_metageneration_match, + if_metageneration_not_match, ) blob._do_resumable_upload.assert_not_called() else: blob._do_multipart_upload.assert_not_called() blob._do_resumable_upload.assert_called_once_with( - client, stream, content_type, size, num_retries, predefined_acl + client, + stream, + content_type, + size, + num_retries, + predefined_acl, + if_generation_match, + if_generation_not_match, + if_metageneration_match, + if_metageneration_not_match, ) def test__do_upload_uses_multipart(self): @@ -1788,6 +1950,10 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs): content_type = u"font/woff" client = mock.sentinel.client predefined_acl = kwargs.get("predefined_acl", None) + if_generation_match = kwargs.get("if_generation_match", None) + if_generation_not_match = kwargs.get("if_generation_not_match", None) + if_metageneration_match = kwargs.get("if_metageneration_match", None) + if_metageneration_not_match = kwargs.get("if_metageneration_not_match", None) ret_val = blob.upload_from_file( stream, size=len(data), content_type=content_type, client=client, **kwargs ) @@ -1800,7 +1966,16 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs): # Check the mock. 
num_retries = kwargs.get("num_retries") blob._do_upload.assert_called_once_with( - client, stream, content_type, len(data), num_retries, predefined_acl + client, + stream, + content_type, + len(data), + num_retries, + predefined_acl, + if_generation_match, + if_generation_not_match, + if_metageneration_match, + if_metageneration_not_match, ) return stream @@ -1844,12 +2019,16 @@ def _do_upload_mock_call_helper(self, blob, client, content_type, size): mock_call = blob._do_upload.mock_calls[0] call_name, pos_args, kwargs = mock_call self.assertEqual(call_name, "") - self.assertEqual(len(pos_args), 6) + self.assertEqual(len(pos_args), 10) self.assertEqual(pos_args[0], client) self.assertEqual(pos_args[2], content_type) self.assertEqual(pos_args[3], size) self.assertIsNone(pos_args[4]) # num_retries self.assertIsNone(pos_args[5]) # predefined_acl + self.assertIsNone(pos_args[6]) # if_generation_match + self.assertIsNone(pos_args[7]) # if_generation_not_match + self.assertIsNone(pos_args[8]) # if_metageneration_match + self.assertIsNone(pos_args[9]) # if_metageneration_not_match self.assertEqual(kwargs, {}) return pos_args[1]