
Commit

fix: preserve metadata value (#298)
Fixes #293
HemangChothani committed Nov 11, 2020
1 parent 3bf5c52 commit 5ab6b0d
Showing 2 changed files with 44 additions and 6 deletions.
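For orientation, a minimal sketch of the kind of call path this change affects; the bucket name and the uploaded bytes are placeholders, and the comments describe the behavior only as the diff below suggests it.

# Hypothetical usage sketch; "my-bucket" and the uploaded bytes are placeholders.
from google.cloud import storage

client = storage.Client()
bucket = client.bucket("my-bucket")
blob = bucket.blob("blob-name")

# Fetch the object's current properties so that "metadata" is present in
# blob._properties without being recorded as a pending change.
blob.reload()

# Re-upload content. With this fix, the upload helpers mark "metadata" as
# changed, so the existing key/value pairs are kept in the upload payload
# rather than being dropped.
blob.upload_from_string(b"new contents")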
4 changes: 4 additions & 0 deletions google/cloud/storage/blob.py
@@ -1608,6 +1608,8 @@ def _do_multipart_upload(
raise ValueError(msg)

transport = self._get_transport(client)
if "metadata" in self._properties and "metadata" not in self._changes:
self._changes.add("metadata")
info = self._get_upload_arguments(content_type)
headers, object_metadata, content_type = info

@@ -1775,6 +1777,8 @@ def _initiate_resumable_upload(
chunk_size = _DEFAULT_CHUNKSIZE

transport = self._get_transport(client)
if "metadata" in self._properties and "metadata" not in self._changes:
self._changes.add("metadata")
info = self._get_upload_arguments(content_type)
headers, object_metadata, content_type = info
if extra_headers is not None:
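Why the two added lines matter: a toy illustration, under the assumption (consistent with the mocks in the tests below) that the JSON part of the upload payload comes from blob._get_writable_metadata(), which serializes only the properties recorded in blob._changes. FakeBlob is not the library's implementation.

# Toy model of the change-tracking behavior; illustrative only.
class FakeBlob:
    def __init__(self, properties):
        self._properties = properties  # everything currently known about the object
        self._changes = set()          # property names queued to be sent to the API

    def _get_writable_metadata(self):
        # Serialize "name" plus whatever has been marked as changed.
        fields = {"name"} | self._changes
        return {k: v for k, v in self._properties.items() if k in fields}


blob = FakeBlob({"name": "blob-name", "metadata": {"test": "test"}})

# Before the fix: "metadata" is known but never marked as changed,
# so it is missing from the upload payload.
assert "metadata" not in blob._get_writable_metadata()

# The two lines added to _do_multipart_upload and _initiate_resumable_upload:
if "metadata" in blob._properties and "metadata" not in blob._changes:
    blob._changes.add("metadata")

assert blob._get_writable_metadata() == {
    "name": "blob-name",
    "metadata": {"test": "test"},
}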
46 changes: 40 additions & 6 deletions tests/unit/test_blob.py
@@ -1828,12 +1828,17 @@ def _do_multipart_success(
if_metageneration_not_match=None,
kms_key_name=None,
timeout=None,
metadata=None,
):
from six.moves.urllib.parse import urlencode

bucket = _Bucket(name="w00t", user_project=user_project)
blob = self._make_one(u"blob-name", bucket=bucket, kms_key_name=kms_key_name)
self.assertIsNone(blob.chunk_size)
if metadata:
self.assertIsNone(blob.metadata)
blob._properties["metadata"] = metadata
self.assertEqual(len(blob._changes), 0)

# Create mocks to be checked for doing transport.
transport = self._mock_transport(http_client.OK, {})
@@ -1906,10 +1911,18 @@ def _do_multipart_success(

upload_url += "?" + urlencode(qs_params)

blob_data = b'{"name": "blob-name"}\r\n'
if metadata:
blob_data = (
b'{"name": "blob-name", "metadata": '
+ json.dumps(metadata).encode("utf-8")
+ b"}\r\n"
)
self.assertEqual(blob._changes, set(["metadata"]))
payload = (
b"--==0==\r\n"
+ b"content-type: application/json; charset=UTF-8\r\n\r\n"
+ b'{"name": "blob-name"}\r\n'
+ blob_data
+ b"--==0==\r\n"
+ b"content-type: application/xml\r\n\r\n"
+ data_read
@@ -1974,6 +1987,10 @@ def test__do_multipart_upload_with_generation_not_match(self, mock_get_boundary)
mock_get_boundary, if_generation_not_match=4, if_metageneration_not_match=4
)

@mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==")
def test__do_multipart_upload_with_metadata(self, mock_get_boundary):
self._do_multipart_success(mock_get_boundary, metadata={"test": "test"})

def test__do_multipart_upload_bad_size(self):
blob = self._make_one(u"blob-name", bucket=None)

@@ -2006,14 +2023,20 @@ def _initiate_resumable_helper(
blob_chunk_size=786432,
kms_key_name=None,
timeout=None,
metadata=None,
):
from six.moves.urllib.parse import urlencode
from google.resumable_media.requests import ResumableUpload
from google.cloud.storage.blob import _DEFAULT_CHUNKSIZE

bucket = _Bucket(name="whammy", user_project=user_project)
blob = self._make_one(u"blob-name", bucket=bucket, kms_key_name=kms_key_name)
blob.metadata = {"rook": "takes knight"}
if metadata:
self.assertIsNone(blob.metadata)
blob._properties["metadata"] = metadata
self.assertEqual(len(blob._changes), 0)
else:
blob.metadata = {"rook": "takes knight"}
blob.chunk_size = blob_chunk_size
if blob_chunk_size is not None:
self.assertIsNotNone(blob.chunk_size)
@@ -2022,8 +2045,11 @@ def _initiate_resumable_helper(

# Need to make sure **same** dict is used because ``json.dumps()``
# will depend on the hash order.
object_metadata = blob._get_writable_metadata()
blob._get_writable_metadata = mock.Mock(return_value=object_metadata, spec=[])
if not metadata:
object_metadata = blob._get_writable_metadata()
blob._get_writable_metadata = mock.Mock(
return_value=object_metadata, spec=[]
)

# Create mocks to be checked for doing transport.
resumable_url = "http://test.invalid?upload_id=hey-you"
@@ -2107,6 +2133,8 @@ def _initiate_resumable_helper(
self.assertNotEqual(blob.chunk_size, chunk_size)
self.assertEqual(upload._chunk_size, chunk_size)
self.assertIs(upload._stream, stream)
if metadata:
self.assertEqual(blob._changes, set(["metadata"]))
if size is None:
self.assertIsNone(upload._total_bytes)
else:
@@ -2125,8 +2153,11 @@ def _initiate_resumable_helper(
# Make sure we never read from the stream.
self.assertEqual(stream.tell(), 0)

# Check the mocks.
blob._get_writable_metadata.assert_called_once_with()
if metadata:
object_metadata = {"name": u"blob-name", "metadata": metadata}
else:
# Check the mocks.
blob._get_writable_metadata.assert_called_once_with()
payload = json.dumps(object_metadata).encode("utf-8")
expected_headers = {
"content-type": "application/json; charset=UTF-8",
@@ -2144,6 +2175,9 @@ def _initiate_resumable_helper(
timeout=expected_timeout,
)

def test__initiate_resumable_upload_with_metadata(self):
self._initiate_resumable_helper(metadata={"test": "test"})

def test__initiate_resumable_upload_with_custom_timeout(self):
self._initiate_resumable_helper(timeout=9.58)

