chore: remove six dependency (#461)
* chore: remove six dependency

* Remove now-redundant self argument
plamut committed Jan 8, 2021
1 parent d01d199 commit 0023d19
Showing 32 changed files with 150 additions and 188 deletions.
google/cloud/bigquery/_helpers.py (3 changes: 1 addition & 2 deletions)
@@ -18,7 +18,6 @@
import datetime
import decimal
import re
import six

from google.cloud._helpers import UTC
from google.cloud._helpers import _date_from_iso8601_date
@@ -451,7 +450,7 @@ def _record_field_to_json(fields, row_value):
for field_name in not_processed:
value = row_value[field_name]
if value is not None:
record[field_name] = six.text_type(value)
record[field_name] = str(value)

return record

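The only behavioural question in this hunk is whether str(value) matches what six.text_type(value) produced; on Python 3 they are the same call. A quick illustration with stand-in values (not part of the commit):

import datetime
import decimal

# On Python 3, str() is exactly what six.text_type() resolved to.
print(str(decimal.Decimal("1.25")))    # 1.25
print(str(datetime.date(2021, 1, 8)))  # 2021-01-08
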
google/cloud/bigquery/_pandas_helpers.py (5 changes: 2 additions & 3 deletions)
@@ -17,10 +17,9 @@
import concurrent.futures
import functools
import logging
import queue
import warnings

import six
from six.moves import queue

try:
import pandas
@@ -738,7 +737,7 @@ def download_dataframe_bqstorage(
def dataframe_to_json_generator(dataframe):
for row in dataframe.itertuples(index=False, name=None):
output = {}
for column, value in six.moves.zip(dataframe.columns, row):
for column, value in zip(dataframe.columns, row):
# Omit NaN values.
if value != value:
continue
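Both changes in this file map six.moves names straight onto the standard library: six.moves.queue is the queue module and six.moves.zip is the built-in zip. A self-contained sketch of the row-to-dict loop above, using made-up column data:

import queue  # replaces six.moves.queue

columns = ["name", "score"]
row = ("alice", float("nan"))

output = {}
for column, value in zip(columns, row):  # replaces six.moves.zip
    if value != value:  # NaN is the only value that is not equal to itself
        continue
    output[column] = value

q = queue.Queue()
q.put(output)
print(q.get())  # {'name': 'alice'}
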
google/cloud/bigquery/client.py (5 changes: 2 additions & 3 deletions)
@@ -34,7 +34,6 @@
import pyarrow
except ImportError: # pragma: NO COVER
pyarrow = None
import six

from google import resumable_media
from google.resumable_media.requests import MultipartUpload
@@ -2017,7 +2016,7 @@ def load_table_from_uri(

job_ref = job._JobReference(job_id, project=project, location=location)

if isinstance(source_uris, six.string_types):
if isinstance(source_uris, str):
source_uris = [source_uris]

destination = _table_arg_to_table_ref(destination, default_project=self.project)
@@ -2779,7 +2778,7 @@ def extract_table(
)
)

if isinstance(destination_uris, six.string_types):
if isinstance(destination_uris, str):
destination_uris = [destination_uris]

if job_config:
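six.string_types is just (str,) on Python 3, so these isinstance guards keep accepting a single URI string as well as a list of URIs. A sketch of that normalization pattern (the helper name below is hypothetical, not part of the client):

def _ensure_list_of_uris(uris):
    # A lone URI string becomes a one-element list; lists pass through.
    if isinstance(uris, str):  # formerly six.string_types
        uris = [uris]
    return uris

print(_ensure_list_of_uris("gs://bucket/data.csv"))
print(_ensure_list_of_uris(["gs://a.csv", "gs://b.csv"]))
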
google/cloud/bigquery/dataset.py (15 changes: 7 additions & 8 deletions)
@@ -16,7 +16,6 @@

from __future__ import absolute_import

import six
import copy

import google.cloud._helpers
@@ -260,9 +259,9 @@ class DatasetReference(object):
"""

def __init__(self, project, dataset_id):
if not isinstance(project, six.string_types):
if not isinstance(project, str):
raise ValueError("Pass a string for project")
if not isinstance(dataset_id, six.string_types):
if not isinstance(dataset_id, str):
raise ValueError("Pass a string for dataset_id")
self._project = project
self._dataset_id = dataset_id
@@ -407,7 +406,7 @@ class Dataset(object):
}

def __init__(self, dataset_ref):
if isinstance(dataset_ref, six.string_types):
if isinstance(dataset_ref, str):
dataset_ref = DatasetReference.from_string(dataset_ref)
self._properties = {"datasetReference": dataset_ref.to_api_repr(), "labels": {}}

@@ -544,7 +543,7 @@ def default_table_expiration_ms(self):

@default_table_expiration_ms.setter
def default_table_expiration_ms(self, value):
if not isinstance(value, six.integer_types) and value is not None:
if not isinstance(value, int) and value is not None:
raise ValueError("Pass an integer, or None")
self._properties["defaultTableExpirationMs"] = _helpers._str_or_none(value)

@@ -560,7 +559,7 @@ def description(self):

@description.setter
def description(self, value):
if not isinstance(value, six.string_types) and value is not None:
if not isinstance(value, str) and value is not None:
raise ValueError("Pass a string, or None")
self._properties["description"] = value

@@ -576,7 +575,7 @@ def friendly_name(self):

@friendly_name.setter
def friendly_name(self, value):
if not isinstance(value, six.string_types) and value is not None:
if not isinstance(value, str) and value is not None:
raise ValueError("Pass a string, or None")
self._properties["friendlyName"] = value

@@ -592,7 +591,7 @@ def location(self):

@location.setter
def location(self, value):
if not isinstance(value, six.string_types) and value is not None:
if not isinstance(value, str) and value is not None:
raise ValueError("Pass a string, or None")
self._properties["location"] = value

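six.integer_types existed because Python 2 distinguished int from long; Python 3 has a single unbounded int, so the bare int check in default_table_expiration_ms is equivalent. For example:

# isinstance(value, int) now covers every integer six.integer_types matched,
# including values beyond the old Python 2 int range.
for value in (3600000, 10**20, None):
    print(value, isinstance(value, int) or value is None)
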
google/cloud/bigquery/dbapi/_helpers.py (10 changes: 4 additions & 6 deletions)
@@ -19,8 +19,6 @@
import functools
import numbers

import six

from google.cloud import bigquery
from google.cloud.bigquery import table
from google.cloud.bigquery.dbapi import exceptions
@@ -132,7 +130,7 @@ def to_query_parameters_dict(parameters):
"""
result = []

for name, value in six.iteritems(parameters):
for name, value in parameters.items():
if isinstance(value, collections_abc.Mapping):
raise NotImplementedError(
"STRUCT-like parameter values are not supported "
@@ -187,9 +185,9 @@ def bigquery_scalar_type(value):
return "FLOAT64"
elif isinstance(value, decimal.Decimal):
return "NUMERIC"
elif isinstance(value, six.text_type):
elif isinstance(value, str):
return "STRING"
elif isinstance(value, six.binary_type):
elif isinstance(value, bytes):
return "BYTES"
elif isinstance(value, datetime.datetime):
return "DATETIME" if value.tzinfo is None else "TIMESTAMP"
@@ -215,7 +213,7 @@ def array_like(value):
bool: ``True`` if the value is considered array-like, ``False`` otherwise.
"""
return isinstance(value, collections_abc.Sequence) and not isinstance(
value, (six.text_type, six.binary_type, bytearray)
value, (str, bytes, bytearray)
)


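On Python 3, dict.items() already returns a lazy view and str/bytes are the types six.text_type/six.binary_type aliased, so the dispatch logic above is unchanged. The rewritten array_like check, reproduced as a runnable snippet with sample values:

import collections.abc as collections_abc

def array_like(value):
    # Sequences are array-like, but text and binary blobs are not.
    return isinstance(value, collections_abc.Sequence) and not isinstance(
        value, (str, bytes, bytearray)
    )

print(array_like([1, 2, 3]))       # True
print(array_like("not-an-array"))  # False
print(array_like(b"raw"))          # False
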
google/cloud/bigquery/dbapi/cursor.py (4 changes: 1 addition & 3 deletions)
@@ -19,8 +19,6 @@
import copy
import logging

import six

from google.cloud.bigquery import job
from google.cloud.bigquery.dbapi import _helpers
from google.cloud.bigquery.dbapi import exceptions
@@ -289,7 +287,7 @@ def fetchone(self):
"""
self._try_fetch()
try:
return six.next(self._query_data)
return next(self._query_data)
except StopIteration:
return None

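six.next was only a compatibility shim; the built-in next() behaves the same way, and its two-argument form mirrors the catch-StopIteration-and-return-None pattern used by fetchone. For example:

rows = iter([("a", 1), ("b", 2)])
print(next(rows))        # ('a', 1)
print(next(rows, None))  # ('b', 2)
print(next(rows, None))  # None once the iterator is exhausted
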
google/cloud/bigquery/enums.py (4 changes: 2 additions & 2 deletions)
@@ -15,7 +15,7 @@
import re

import enum
import six
import itertools

from google.cloud.bigquery_v2 import types as gapic_types

@@ -178,7 +178,7 @@ def _make_sql_scalars_enum():
)

new_doc = "\n".join(
six.moves.filterfalse(skip_pattern.search, orig_doc.splitlines())
itertools.filterfalse(skip_pattern.search, orig_doc.splitlines())
)
new_enum.__doc__ = "An Enum of scalar SQL types.\n" + new_doc

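six.moves.filterfalse maps directly to itertools.filterfalse, which yields the items a predicate rejects. A stand-alone illustration of the docstring-filtering idea, with a made-up pattern and lines:

import itertools
import re

skip_pattern = re.compile("GEOGRAPHY")  # stand-in pattern, not the library's
lines = ["STRING", "GEOGRAPHY", "INT64"]

# Keep only the lines the pattern does NOT match.
print("\n".join(itertools.filterfalse(skip_pattern.search, lines)))
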
google/cloud/bigquery/job/base.py (40 changes: 20 additions & 20 deletions)
@@ -15,11 +15,11 @@
"""Base classes and helpers for job classes."""

import copy
import http
import threading

from google.api_core import exceptions
import google.api_core.future.polling
from six.moves import http_client

from google.cloud.bigquery import _helpers
from google.cloud.bigquery.retry import DEFAULT_RETRY
@@ -28,24 +28,24 @@
_DONE_STATE = "DONE"
_STOPPED_REASON = "stopped"
_ERROR_REASON_TO_EXCEPTION = {
"accessDenied": http_client.FORBIDDEN,
"backendError": http_client.INTERNAL_SERVER_ERROR,
"billingNotEnabled": http_client.FORBIDDEN,
"billingTierLimitExceeded": http_client.BAD_REQUEST,
"blocked": http_client.FORBIDDEN,
"duplicate": http_client.CONFLICT,
"internalError": http_client.INTERNAL_SERVER_ERROR,
"invalid": http_client.BAD_REQUEST,
"invalidQuery": http_client.BAD_REQUEST,
"notFound": http_client.NOT_FOUND,
"notImplemented": http_client.NOT_IMPLEMENTED,
"quotaExceeded": http_client.FORBIDDEN,
"rateLimitExceeded": http_client.FORBIDDEN,
"resourceInUse": http_client.BAD_REQUEST,
"resourcesExceeded": http_client.BAD_REQUEST,
"responseTooLarge": http_client.FORBIDDEN,
"stopped": http_client.OK,
"tableUnavailable": http_client.BAD_REQUEST,
"accessDenied": http.client.FORBIDDEN,
"backendError": http.client.INTERNAL_SERVER_ERROR,
"billingNotEnabled": http.client.FORBIDDEN,
"billingTierLimitExceeded": http.client.BAD_REQUEST,
"blocked": http.client.FORBIDDEN,
"duplicate": http.client.CONFLICT,
"internalError": http.client.INTERNAL_SERVER_ERROR,
"invalid": http.client.BAD_REQUEST,
"invalidQuery": http.client.BAD_REQUEST,
"notFound": http.client.NOT_FOUND,
"notImplemented": http.client.NOT_IMPLEMENTED,
"quotaExceeded": http.client.FORBIDDEN,
"rateLimitExceeded": http.client.FORBIDDEN,
"resourceInUse": http.client.BAD_REQUEST,
"resourcesExceeded": http.client.BAD_REQUEST,
"responseTooLarge": http.client.FORBIDDEN,
"stopped": http.client.OK,
"tableUnavailable": http.client.BAD_REQUEST,
}


@@ -66,7 +66,7 @@ def _error_result_to_exception(error_result):
"""
reason = error_result.get("reason")
status_code = _ERROR_REASON_TO_EXCEPTION.get(
reason, http_client.INTERNAL_SERVER_ERROR
reason, http.client.INTERNAL_SERVER_ERROR
)
return exceptions.from_http_status(
status_code, error_result.get("message", ""), errors=[error_result]
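six.moves.http_client is simply the standard-library http.client module; its status constants are plain integers (an IntEnum on modern Pythons), so they feed exceptions.from_http_status unchanged. A minimal check:

import http.client

# The constants compare and convert like the ints the six.moves alias exposed.
print(int(http.client.FORBIDDEN))              # 403
print(int(http.client.INTERNAL_SERVER_ERROR))  # 500
print(http.client.NOT_FOUND == 404)            # True
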
google/cloud/bigquery/job/query.py (5 changes: 2 additions & 3 deletions)
@@ -20,7 +20,6 @@

from google.api_core import exceptions
import requests
import six

from google.cloud.bigquery.dataset import Dataset
from google.cloud.bigquery.dataset import DatasetListItem
@@ -192,7 +191,7 @@ def default_dataset(self, value):
self._set_sub_prop("defaultDataset", None)
return

if isinstance(value, six.string_types):
if isinstance(value, str):
value = DatasetReference.from_string(value)

if isinstance(value, (Dataset, DatasetListItem)):
@@ -1168,7 +1167,7 @@ def result(
exc.query_job = self
raise
except requests.exceptions.Timeout as exc:
six.raise_from(concurrent.futures.TimeoutError, exc)
raise concurrent.futures.TimeoutError from exc

# If the query job is complete but there are no query results, this was
# special job, such as a DDL query. Return an empty result set to
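six.raise_from(new, cause) becomes the native raise new from cause, which stores the original exception on __cause__ so the traceback keeps the chain. A small demonstration using OSError as a stand-in for requests.exceptions.Timeout:

import concurrent.futures

try:
    try:
        raise OSError("socket timed out")  # stand-in for requests.exceptions.Timeout
    except OSError as exc:
        # Equivalent of six.raise_from(concurrent.futures.TimeoutError(), exc)
        raise concurrent.futures.TimeoutError() from exc
except concurrent.futures.TimeoutError as converted:
    print(type(converted.__cause__).__name__)  # OSError
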
google/cloud/bigquery/magics/magics.py (12 changes: 5 additions & 7 deletions)
@@ -153,8 +153,6 @@
except ImportError: # pragma: NO COVER
raise ImportError("This module can only be loaded in IPython.")

import six

from google.api_core import client_info
from google.api_core import client_options
from google.api_core.exceptions import NotFound
@@ -577,16 +575,16 @@ def _cell_magic(line, query):
"--params is not a correctly formatted JSON string or a JSON "
"serializable dictionary"
)
six.raise_from(rebranded_error, exc)
raise rebranded_error from exc
except lap.exceptions.DuplicateQueryParamsError as exc:
rebranded_error = ValueError("Duplicate --params option.")
six.raise_from(rebranded_error, exc)
raise rebranded_error from exc
except lap.exceptions.ParseError as exc:
rebranded_error = ValueError(
"Unrecognized input, are option values correct? "
"Error details: {}".format(exc.args[0])
)
six.raise_from(rebranded_error, exc)
raise rebranded_error from exc

args = magic_arguments.parse_argstring(_cell_magic, rest_of_args)

@@ -768,15 +766,15 @@ def _make_bqstorage_client(use_bqstorage_api, credentials, client_options):
"to use it. Alternatively, use the classic REST API by specifying "
"the --use_rest_api magic option."
)
six.raise_from(customized_error, err)
raise customized_error from err

try:
from google.api_core.gapic_v1 import client_info as gapic_client_info
except ImportError as err:
customized_error = ImportError(
"Install the grpcio package to use the BigQuery Storage API."
)
six.raise_from(customized_error, err)
raise customized_error from err

return bigquery_storage.BigQueryReadClient(
credentials=credentials,
google/cloud/bigquery/model.py (5 changes: 2 additions & 3 deletions)
@@ -19,7 +19,6 @@
import copy

from google.protobuf import json_format
import six

import google.cloud._helpers
from google.api_core import datetime_helpers
@@ -63,7 +62,7 @@ def __init__(self, model_ref):
# buffer classes do not.
self._properties = {}

if isinstance(model_ref, six.string_types):
if isinstance(model_ref, str):
model_ref = ModelReference.from_string(model_ref)

if model_ref:
@@ -455,7 +454,7 @@ def _model_arg_to_model_ref(value, default_project=None):
This function keeps ModelReference and other kinds of objects unchanged.
"""
if isinstance(value, six.string_types):
if isinstance(value, str):
return ModelReference.from_string(value, default_project=default_project)
if isinstance(value, Model):
return value.reference
google/cloud/bigquery/routine.py (3 changes: 1 addition & 2 deletions)
@@ -17,7 +17,6 @@
"""Define resources for the BigQuery Routines API."""

from google.protobuf import json_format
import six

import google.cloud._helpers
from google.cloud.bigquery import _helpers
@@ -54,7 +53,7 @@ class Routine(object):
}

def __init__(self, routine_ref, **kwargs):
if isinstance(routine_ref, six.string_types):
if isinstance(routine_ref, str):
routine_ref = RoutineReference.from_string(routine_ref)

self._properties = {"routineReference": routine_ref.to_api_repr()}
