From c787d12ed9258ef883e381e9fa23b6797cee86a7 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 13 Jul 2021 12:44:32 -0400 Subject: [PATCH] tests: suppress deprecation warnings in unit test output (#499) Test hygiene: avoid importing module-under-test at module scope Closes #498. --- google/cloud/storage/blob.py | 11 +-- tests/unit/test_blob.py | 66 +++++++++++----- tests/unit/test_fileio.py | 141 +++++++++++++++++++++++------------ 3 files changed, 148 insertions(+), 70 deletions(-) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 1e8829aea..e6b7e835f 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -36,8 +36,8 @@ import os import re import warnings -import six +import six from six.moves.urllib.parse import parse_qsl from six.moves.urllib.parse import quote from six.moves.urllib.parse import urlencode @@ -135,6 +135,10 @@ _COMPOSE_IF_SOURCE_GENERATION_MISMATCH_ERROR = ( "'if_source_generation_match' length must be the same as 'sources' length" ) +_DOWNLOAD_AS_STRING_DEPRECATED = ( + "Blob.download_as_string() is deprecated and will be removed in future. " + "Use Blob.download_as_bytes() instead.", +) _DEFAULT_CHUNKSIZE = 104857600 # 1024 * 1024 B * 100 = 100 MB @@ -1514,10 +1518,7 @@ def download_as_string( :raises: :class:`google.cloud.exceptions.NotFound` """ warnings.warn( - "Blob.download_as_string() is deprecated and will be removed in future." 
- "Use Blob.download_as_bytes() instead.", - PendingDeprecationWarning, - stacklevel=1, + _DOWNLOAD_AS_STRING_DEPRECATED, PendingDeprecationWarning, stacklevel=2 ) return self.download_as_bytes( client=client, diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index fe318a696..41934be33 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -2068,6 +2068,8 @@ def test_download_as_text_w_non_ascii_wo_explicit_encoding_w_charset(self): @mock.patch("warnings.warn") def test_download_as_string(self, mock_warn): + from google.cloud.storage.blob import _DOWNLOAD_AS_STRING_DEPRECATED + MEDIA_LINK = "http://example.com/media/" client = self._make_client() @@ -2096,14 +2098,14 @@ def test_download_as_string(self, mock_warn): retry=DEFAULT_RETRY, ) - mock_warn.assert_called_with( - "Blob.download_as_string() is deprecated and will be removed in future." - "Use Blob.download_as_bytes() instead.", - PendingDeprecationWarning, - stacklevel=1, + mock_warn.assert_called_once_with( + _DOWNLOAD_AS_STRING_DEPRECATED, PendingDeprecationWarning, stacklevel=2, ) - def test_download_as_string_no_retry(self): + @mock.patch("warnings.warn") + def test_download_as_string_no_retry(self, mock_warn): + from google.cloud.storage.blob import _DOWNLOAD_AS_STRING_DEPRECATED + MEDIA_LINK = "http://example.com/media/" client = self._make_client() @@ -2132,6 +2134,10 @@ def test_download_as_string_no_retry(self): retry=None, ) + mock_warn.assert_called_once_with( + _DOWNLOAD_AS_STRING_DEPRECATED, PendingDeprecationWarning, stacklevel=2, + ) + def test__get_content_type_explicit(self): blob = self._make_one(u"blob-name", bucket=None) @@ -2718,7 +2724,7 @@ def test__initiate_resumable_upload_with_extra_headers(self): def test__initiate_resumable_upload_with_retry(self): self._initiate_resumable_helper(retry=DEFAULT_RETRY) - def test__initiate_resumable_upload_with_num_retries(self): + def test__initiate_resumable_upload_w_num_retries(self): 
self._initiate_resumable_helper(num_retries=11) def test__initiate_resumable_upload_with_retry_conflict(self): @@ -2983,7 +2989,7 @@ def test__do_resumable_upload_with_size(self): def test__do_resumable_upload_with_retry(self): self._do_resumable_helper(retry=DEFAULT_RETRY) - def test__do_resumable_upload_with_num_retries(self): + def test__do_resumable_upload_w_num_retries(self): self._do_resumable_helper(num_retries=8) def test__do_resumable_upload_with_retry_conflict(self): @@ -3129,7 +3135,7 @@ def test__do_upload_uses_resumable_w_custom_timeout(self): def test__do_upload_with_retry(self): self._do_upload_helper(retry=DEFAULT_RETRY) - def test__do_upload_with_num_retries(self): + def test__do_upload_w_num_retries(self): self._do_upload_helper(num_retries=2) def test__do_upload_with_conditional_retry_success(self): @@ -3199,26 +3205,32 @@ def test_upload_from_file_success(self): stream = self._upload_from_file_helper(predefined_acl="private") assert stream.tell() == 2 - @mock.patch("warnings.warn") - def test_upload_from_file_with_retries(self, mock_warn): + def test_upload_from_file_with_retry(self): self._upload_from_file_helper(retry=DEFAULT_RETRY) @mock.patch("warnings.warn") - def test_upload_from_file_with_num_retries(self, mock_warn): - from google.cloud.storage import blob as blob_module + def test_upload_from_file_w_num_retries(self, mock_warn): + from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE self._upload_from_file_helper(num_retries=2) + mock_warn.assert_called_once_with( - blob_module._NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2 + _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, ) @mock.patch("warnings.warn") def test_upload_from_file_with_retry_conflict(self, mock_warn): + from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE + # Special case here: in a conflict this method should NOT raise an error # as that's handled further downstream. It should pass both options # through. 
self._upload_from_file_helper(retry=DEFAULT_RETRY, num_retries=2) + mock_warn.assert_called_once_with( + _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + ) + def test_upload_from_file_with_rewind(self): stream = self._upload_from_file_helper(rewind=True) assert stream.tell() == 0 @@ -3342,8 +3354,10 @@ def test_upload_from_filename_with_retry(self): self.assertEqual(stream.mode, "rb") self.assertEqual(stream.name, temp.name) - def test_upload_from_filename_with_num_retries(self): + @mock.patch("warnings.warn") + def test_upload_from_filename_w_num_retries(self, mock_warn): from google.cloud._testing import _NamedTemporaryFile + from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE blob = self._make_one("blob-name", bucket=None) # Mock low-level upload helper on blob (it is tested elsewhere). @@ -3375,6 +3389,10 @@ def test_upload_from_filename_with_num_retries(self): self.assertEqual(stream.mode, "rb") self.assertEqual(stream.name, temp.name) + mock_warn.assert_called_once_with( + _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + ) + def test_upload_from_filename_w_custom_timeout(self): from google.cloud._testing import _NamedTemporaryFile @@ -3453,10 +3471,17 @@ def test_upload_from_string_w_text_w_retry(self): data = u"\N{snowman} \N{sailboat}" self._upload_from_string_helper(data, retry=DEFAULT_RETRY) - def test_upload_from_string_w_text_w_num_retries(self): + @mock.patch("warnings.warn") + def test_upload_from_string_with_num_retries(self, mock_warn): + from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE + data = u"\N{snowman} \N{sailboat}" self._upload_from_string_helper(data, num_retries=2) + mock_warn.assert_called_once_with( + _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + ) + def _create_resumable_upload_session_helper( self, origin=None, @@ -4303,7 +4328,10 @@ def test_compose_w_if_generation_match_list_w_warning(self, mock_warn): _COMPOSE_IF_GENERATION_LIST_DEPRECATED, DeprecationWarning, stacklevel=2, ) - 
def test_compose_w_if_generation_match_and_if_s_generation_match(self): + @mock.patch("warnings.warn") + def test_compose_w_if_generation_match_and_if_s_generation_match(self, mock_warn): + from google.cloud.storage.blob import _COMPOSE_IF_GENERATION_LIST_DEPRECATED + source_1_name = "source-1" source_2_name = "source-2" destination_name = "destination" @@ -4324,6 +4352,10 @@ def test_compose_w_if_generation_match_and_if_s_generation_match(self): client._post_resource.assert_not_called() + mock_warn.assert_called_with( + _COMPOSE_IF_GENERATION_LIST_DEPRECATED, DeprecationWarning, stacklevel=2, + ) + @mock.patch("warnings.warn") def test_compose_w_if_metageneration_match_list_w_warning(self, mock_warn): from google.cloud.storage.blob import _COMPOSE_IF_METAGENERATION_LIST_DEPRECATED diff --git a/tests/unit/test_fileio.py b/tests/unit/test_fileio.py index 9fadc967c..aa64411f7 100644 --- a/tests/unit/test_fileio.py +++ b/tests/unit/test_fileio.py @@ -15,12 +15,11 @@ # limitations under the License. 
import unittest -import mock import io import string -from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE -from google.cloud.storage.fileio import BlobReader, BlobWriter, SlidingBuffer +import mock + from google.api_core.exceptions import RequestRangeNotSatisfiable from google.cloud.storage.retry import DEFAULT_RETRY @@ -31,11 +30,27 @@ NUM_RETRIES = 2 -class TestBlobReaderBinary(unittest.TestCase): +class _BlobReaderBase: + @staticmethod + def _make_blob_reader(*args, **kwargs): + from google.cloud.storage.fileio import BlobReader + + return BlobReader(*args, **kwargs) + + +class _BlobWriterBase: + @staticmethod + def _make_blob_writer(*args, **kwargs): + from google.cloud.storage.fileio import BlobWriter + + return BlobWriter(*args, **kwargs) + + +class TestBlobReaderBinary(unittest.TestCase, _BlobReaderBase): def test_attributes(self): blob = mock.Mock() blob.chunk_size = 256 - reader = BlobReader(blob) + reader = self._make_blob_reader(blob) self.assertTrue(reader.seekable()) self.assertTrue(reader.readable()) self.assertFalse(reader.writable()) @@ -45,7 +60,7 @@ def test_attributes(self): def test_attributes_explict(self): blob = mock.Mock() blob.chunk_size = 256 - reader = BlobReader(blob, chunk_size=1024, retry=None) + reader = self._make_blob_reader(blob, chunk_size=1024, retry=None) self.assertEqual(reader._chunk_size, 1024) self.assertIsNone(reader._retry) @@ -57,7 +72,7 @@ def read_from_fake_data(start=0, end=None, **_): blob.download_as_bytes = mock.Mock(side_effect=read_from_fake_data) download_kwargs = {"if_metageneration_match": 1} - reader = BlobReader(blob, chunk_size=8, **download_kwargs) + reader = self._make_blob_reader(blob, chunk_size=8, **download_kwargs) # Read and trigger the first download of chunk_size. 
self.assertEqual(reader.read(1), TEST_BINARY_DATA[0:1]) @@ -102,7 +117,9 @@ def read_from_fake_data(start=0, end=None, **_): blob.download_as_bytes = mock.Mock(side_effect=read_from_fake_data) download_kwargs = {"if_metageneration_match": 1} - reader = BlobReader(blob, chunk_size=8, retry=None, **download_kwargs) + reader = self._make_blob_reader( + blob, chunk_size=8, retry=None, **download_kwargs + ) # Read and trigger the first download of chunk_size. self.assertEqual(reader.read(1), TEST_BINARY_DATA[0:1]) @@ -118,7 +135,7 @@ def test_416_error_handled(self): side_effect=RequestRangeNotSatisfiable("message") ) - reader = BlobReader(blob) + reader = self._make_blob_reader(blob) self.assertEqual(reader.read(), b"") def test_readline(self): @@ -128,7 +145,7 @@ def read_from_fake_data(start=0, end=None, **_): return TEST_BINARY_DATA[start:end] blob.download_as_bytes = mock.Mock(side_effect=read_from_fake_data) - reader = BlobReader(blob, chunk_size=10) + reader = self._make_blob_reader(blob, chunk_size=10) # Read a line. With chunk_size=10, expect three chunks downloaded. self.assertEqual(reader.readline(), TEST_BINARY_DATA[:27]) @@ -168,7 +185,7 @@ def read_from_fake_data(start=0, end=None, **_): blob.download_as_bytes = mock.Mock(side_effect=read_from_fake_data) blob.size = None download_kwargs = {"if_metageneration_match": 1} - reader = BlobReader(blob, chunk_size=8, **download_kwargs) + reader = self._make_blob_reader(blob, chunk_size=8, **download_kwargs) # Seek needs the blob size to work and should call reload() if the size # is not known. Set a mock to initialize the size if reload() is called. @@ -214,7 +231,7 @@ def initialize_size(**_): def test_close(self): blob = mock.Mock() - reader = BlobReader(blob) + reader = self._make_blob_reader(blob) reader.close() @@ -227,20 +244,20 @@ def test_close(self): def test_context_mgr(self): # Just very that the context manager form doesn't crash. 
blob = mock.Mock() - with BlobReader(blob) as reader: + with self._make_blob_reader(blob) as reader: reader.close() def test_rejects_invalid_kwargs(self): blob = mock.Mock() with self.assertRaises(ValueError): - BlobReader(blob, invalid_kwarg=1) + self._make_blob_reader(blob, invalid_kwarg=1) -class TestBlobWriterBinary(unittest.TestCase): +class TestBlobWriterBinary(unittest.TestCase, _BlobWriterBase): def test_attributes(self): blob = mock.Mock() blob.chunk_size = 256 * 1024 - writer = BlobWriter(blob) + writer = self._make_blob_writer(blob) self.assertFalse(writer.seekable()) self.assertFalse(writer.readable()) self.assertTrue(writer.writable()) @@ -249,7 +266,9 @@ def test_attributes(self): def test_attributes_explicit(self): blob = mock.Mock() blob.chunk_size = 256 * 1024 - writer = BlobWriter(blob, chunk_size=512 * 1024, retry=DEFAULT_RETRY) + writer = self._make_blob_writer( + blob, chunk_size=512 * 1024, retry=DEFAULT_RETRY + ) self.assertEqual(writer._chunk_size, 512 * 1024) self.assertEqual(writer._retry, DEFAULT_RETRY) @@ -257,11 +276,13 @@ def test_reject_wrong_chunk_size(self): blob = mock.Mock() blob.chunk_size = 123 with self.assertRaises(ValueError): - _ = BlobWriter(blob) + _ = self._make_blob_writer(blob) - def test_write(self): - blob = mock.Mock() + @mock.patch("warnings.warn") + def test_write(self, mock_warn): + from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE + blob = mock.Mock() upload = mock.Mock() transport = mock.Mock() @@ -274,7 +295,7 @@ def test_write(self): # gives us more control over close() for test purposes. upload_kwargs = {"if_metageneration_match": 1} chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. 
- writer = BlobWriter( + writer = self._make_blob_writer( blob, chunk_size=chunk_size, num_retries=NUM_RETRIES, @@ -315,16 +336,20 @@ def test_write(self): writer.close() self.assertEqual(upload.transmit_next_chunk.call_count, 5) + mock_warn.assert_called_once_with( + _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + ) + def test_flush_fails(self): blob = mock.Mock(chunk_size=None) - writer = BlobWriter(blob) + writer = self._make_blob_writer(blob) with self.assertRaises(io.UnsupportedOperation): writer.flush() def test_seek_fails(self): blob = mock.Mock(chunk_size=None) - writer = BlobWriter(blob) + writer = self._make_blob_writer(blob) with self.assertRaises(io.UnsupportedOperation): writer.seek() @@ -342,7 +367,7 @@ def test_conditional_retry_failure(self): # It would be normal to use a context manager here, but not doing so # gives us more control over close() for test purposes. chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. - writer = BlobWriter( + writer = self._make_blob_writer( blob, chunk_size=chunk_size, content_type=PLAIN_CONTENT_TYPE, ) @@ -391,7 +416,7 @@ def test_conditional_retry_pass(self): # It would be normal to use a context manager here, but not doing so # gives us more control over close() for test purposes. chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. - writer = BlobWriter( + writer = self._make_blob_writer( blob, chunk_size=chunk_size, content_type=PLAIN_CONTENT_TYPE, @@ -431,8 +456,7 @@ def test_conditional_retry_pass(self): writer.close() self.assertEqual(upload.transmit_next_chunk.call_count, 5) - @mock.patch("warnings.warn") - def test_forced_default_retry(self, mock_warn): + def test_forced_default_retry(self): blob = mock.Mock() upload = mock.Mock() @@ -445,7 +469,7 @@ def test_forced_default_retry(self, mock_warn): # It would be normal to use a context manager here, but not doing so # gives us more control over close() for test purposes. 
chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. - writer = BlobWriter( + writer = self._make_blob_writer( blob, chunk_size=chunk_size, content_type=PLAIN_CONTENT_TYPE, @@ -483,7 +507,10 @@ def test_forced_default_retry(self, mock_warn): writer.close() self.assertEqual(upload.transmit_next_chunk.call_count, 5) - def test_num_retries_and_retry_conflict(self): + @mock.patch("warnings.warn") + def test_num_retries_and_retry_conflict(self, mock_warn): + from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE + blob = mock.Mock() blob._initiate_resumable_upload.side_effect = ValueError @@ -493,7 +520,7 @@ def test_num_retries_and_retry_conflict(self): # It would be normal to use a context manager here, but not doing so # gives us more control over close() for test purposes. chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. - writer = BlobWriter( + writer = self._make_blob_writer( blob, chunk_size=chunk_size, content_type=PLAIN_CONTENT_TYPE, @@ -521,10 +548,15 @@ def test_num_retries_and_retry_conflict(self): retry=DEFAULT_RETRY, ) + mock_warn.assert_called_once_with( + _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + ) + @mock.patch("warnings.warn") def test_num_retries_only(self, mock_warn): - blob = mock.Mock() + from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE + blob = mock.Mock() upload = mock.Mock() transport = mock.Mock() @@ -535,7 +567,7 @@ def test_num_retries_only(self, mock_warn): # It would be normal to use a context manager here, but not doing so # gives us more control over close() for test purposes. chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. 
- writer = BlobWriter( + writer = self._make_blob_writer( blob, chunk_size=chunk_size, content_type=PLAIN_CONTENT_TYPE, @@ -567,6 +599,7 @@ def test_num_retries_only(self, mock_warn): ) upload.transmit_next_chunk.assert_called_with(transport) self.assertEqual(upload.transmit_next_chunk.call_count, 4) + mock_warn.assert_called_once_with( _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2 ) @@ -579,12 +612,18 @@ def test_num_retries_only(self, mock_warn): def test_rejects_invalid_kwargs(self): blob = mock.Mock() with self.assertRaises(ValueError): - BlobWriter(blob, invalid_kwarg=1) + self._make_blob_writer(blob, invalid_kwarg=1) class Test_SlidingBuffer(unittest.TestCase): + @staticmethod + def _make_sliding_buffer(*args, **kwargs): + from google.cloud.storage.fileio import SlidingBuffer + + return SlidingBuffer(*args, **kwargs) + def test_write_and_read(self): - buff = SlidingBuffer() + buff = self._make_sliding_buffer() # Write and verify tell() still reports 0 and len is correct. buff.write(TEST_BINARY_DATA) @@ -597,7 +636,7 @@ def test_write_and_read(self): self.assertEqual(len(buff), len(TEST_BINARY_DATA)) def test_flush(self): - buff = SlidingBuffer() + buff = self._make_sliding_buffer() # Write and verify tell() still reports 0 and len is correct. buff.write(TEST_BINARY_DATA) @@ -620,7 +659,7 @@ def test_flush(self): self.assertEqual(len(buff), len(TEST_BINARY_DATA[8:])) def test_seek(self): - buff = SlidingBuffer() + buff = self._make_sliding_buffer() buff.write(TEST_BINARY_DATA) # Try to seek forward. Verify the tell() doesn't change. 
@@ -643,16 +682,16 @@ def test_seek(self): self.assertEqual(pos, buff.tell()) def test_close(self): - buff = SlidingBuffer() + buff = self._make_sliding_buffer() buff.close() with self.assertRaises(ValueError): buff.read() -class TestBlobReaderText(unittest.TestCase): +class TestBlobReaderText(unittest.TestCase, _BlobReaderBase): def test_attributes(self): blob = mock.Mock() - reader = io.TextIOWrapper(BlobReader(blob)) + reader = io.TextIOWrapper(self._make_blob_reader(blob)) self.assertTrue(reader.seekable()) self.assertTrue(reader.readable()) self.assertFalse(reader.writable()) @@ -667,7 +706,7 @@ def read_from_fake_data(start=0, end=None, **_): blob.chunk_size = None blob.size = len(TEST_TEXT_DATA.encode("utf-8")) download_kwargs = {"if_metageneration_match": 1} - reader = io.TextIOWrapper(BlobReader(blob, **download_kwargs)) + reader = io.TextIOWrapper(self._make_blob_reader(blob, **download_kwargs)) # The TextIOWrapper class has an internally defined chunk size which # will override ours. The wrapper class is not under test. @@ -698,7 +737,7 @@ def read_from_fake_data(start=0, end=None, **_): blob.chunk_size = None blob.size = len(TEST_MULTIBYTE_TEXT_DATA.encode("utf-8")) download_kwargs = {"if_metageneration_match": 1} - reader = io.TextIOWrapper(BlobReader(blob, **download_kwargs)) + reader = io.TextIOWrapper(self._make_blob_reader(blob, **download_kwargs)) # The TextIOWrapper class has an internally defined chunk size which # will override ours. The wrapper class is not under test. @@ -729,7 +768,7 @@ def read_from_fake_data(start=0, end=None, **_): blob.size = None blob.chunk_size = None download_kwargs = {"if_metageneration_match": 1} - reader = io.TextIOWrapper(BlobReader(blob, **download_kwargs)) + reader = io.TextIOWrapper(self._make_blob_reader(blob, **download_kwargs)) # Seek needs the blob size to work and should call reload() if the size # is not known. Set a mock to initialize the size if reload() is called. 
@@ -762,7 +801,7 @@ def read_from_fake_data(start=0, end=None, **_): blob.size = None blob.chunk_size = None download_kwargs = {"if_metageneration_match": 1} - reader = io.TextIOWrapper(BlobReader(blob, **download_kwargs)) + reader = io.TextIOWrapper(self._make_blob_reader(blob, **download_kwargs)) # Seek needs the blob size to work and should call reload() if the size # is not known. Set a mock to initialize the size if reload() is called. @@ -788,7 +827,7 @@ def initialize_size(**_): def test_close(self): blob = mock.Mock() - reader = BlobReader(blob) + reader = self._make_blob_reader(blob) reader.close() @@ -799,10 +838,12 @@ def test_close(self): reader.seek(0) -class TestBlobWriterText(unittest.TestCase): - def test_write(self): - blob = mock.Mock() +class TestBlobWriterText(unittest.TestCase, _BlobWriterBase): + @mock.patch("warnings.warn") + def test_write(self, mock_warn): + from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE + blob = mock.Mock() upload = mock.Mock() transport = mock.Mock() @@ -813,7 +854,7 @@ def test_write(self): # It would be normal to use a context manager here, but not doing so # gives us more control over close() for test purposes. chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. - unwrapped_writer = BlobWriter( + unwrapped_writer = self._make_blob_writer( blob, chunk_size=chunk_size, text_mode=True, @@ -848,3 +889,7 @@ def test_write(self): retry=None, ) upload.transmit_next_chunk.assert_called_with(transport) + + mock_warn.assert_called_once_with( + _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2, + )