chore: drop six (#310)
tseaver committed Sep 22, 2021
1 parent 06bdecd commit f4fcc4e
Showing 2 changed files with 6 additions and 6 deletions.
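
For context, every change in this commit swaps a six shim for its Python 3 standard-library equivalent. A minimal illustration of that mapping (the sample values below are made up, not taken from the diff):

import io

# six.string_types -> the single built-in text type, str
isinstance("timestamp-micros", str)   # True

# six.next(iterator) -> the built-in next()
next(iter([1, 2, 3]))                 # 1

# six.BytesIO -> io.BytesIO from the standard library
buffer = io.BytesIO(b"serialized avro rows")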
8 changes: 4 additions & 4 deletions google/cloud/bigquery_storage_v1/reader.py
@@ -15,6 +15,7 @@
 from __future__ import absolute_import
 
 import collections
+import io
 import json
 
 try:
@@ -31,7 +32,6 @@
     import pyarrow
 except ImportError:  # pragma: NO COVER
     pyarrow = None
-import six
 
 try:
     import pyarrow
@@ -400,7 +400,7 @@ def _dtypes_from_avro(self, avro_fields):
             if isinstance(field_info["type"], list):
                 type_info = next(item for item in field_info["type"] if item != "null")
 
-            if isinstance(type_info, six.string_types):
+            if isinstance(type_info, str):
                 field_dtype = type_map.get(type_info, "object")
             else:
                 logical_type = type_info.get("logicalType")
@@ -461,7 +461,7 @@ def next(self):
             self._parse_rows()
         if self._remaining > 0:
             self._remaining -= 1
-        return six.next(self._iter_rows)
+        return next(self._iter_rows)
 
     # Alias needed for Python 2/3 support.
     __next__ = next
@@ -652,7 +652,7 @@ def to_rows(self, message):
                 A sequence of rows, represented as dictionaries.
         """
         self._parse_fastavro()
-        messageio = six.BytesIO(message.avro_rows.serialized_binary_rows)
+        messageio = io.BytesIO(message.avro_rows.serialized_binary_rows)
         while True:
             # Loop in a while loop because schemaless_reader can only read
             # a single record.
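
The hunk at line 400 keeps the existing branch structure: a union such as ["null", "string"] yields a plain type name, now detected with the built-in str instead of six.string_types, while logical types still arrive as a mapping. A rough, hypothetical illustration of the two shapes that branch handles:

# Plain Avro type name -> looked up in type_map by name.
type_info = "string"
isinstance(type_info, str)   # True

# Logical type (e.g. a timestamp column) -> a dict carrying "logicalType".
type_info = {"type": "long", "logicalType": "timestamp-micros"}
isinstance(type_info, str)   # False; the code reads type_info.get("logicalType")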
4 changes: 2 additions & 2 deletions tests/unit/test_reader_v1.py
@@ -15,14 +15,14 @@
 # limitations under the License.
 
 import itertools
+import io
 import json
 from unittest import mock
 
 import fastavro
 import pandas
 import pandas.testing
 import pytest
-import six
 
 import google.api_core.exceptions
 from google.cloud.bigquery_storage import types
@@ -68,7 +68,7 @@ def _bq_to_avro_blocks(bq_blocks, avro_schema_json):
     avro_blocks = []
     first_message = True
     for block in bq_blocks:
-        blockio = six.BytesIO()
+        blockio = io.BytesIO()
         for row in block:
             fastavro.schemaless_writer(blockio, avro_schema, row)
         response = types.ReadRowsResponse()
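
Both files converge on the same io.BytesIO pattern that fastavro's schemaless writer and reader expect. A standalone sketch of that round trip (not part of the commit; the schema and record are illustrative assumptions):

import io

import fastavro

# Tiny schema, invented for this example only.
avro_schema = fastavro.parse_schema(
    {
        "type": "record",
        "name": "ExampleRow",
        "fields": [
            {"name": "name", "type": "string"},
            {"name": "age", "type": "long"},
        ],
    }
)

# Write a single record without a container header, as the test helper does.
blockio = io.BytesIO()
fastavro.schemaless_writer(blockio, avro_schema, {"name": "Ada", "age": 36})

# Rewind and read it back, mirroring what reader.py does per ReadRowsResponse.
blockio.seek(0)
row = fastavro.schemaless_reader(blockio, avro_schema)
print(row)  # {'name': 'Ada', 'age': 36}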
