diff --git a/pybigquery/sqlalchemy_bigquery.py b/pybigquery/sqlalchemy_bigquery.py
index c73adea9..5a6ad105 100644
--- a/pybigquery/sqlalchemy_bigquery.py
+++ b/pybigquery/sqlalchemy_bigquery.py
@@ -165,15 +165,6 @@ def __init__(self, dialect, statement, column_keys=None, inline=False, **kwargs)
             dialect, statement, column_keys, inline, **kwargs
         )
 
-    def visit_select(self, *args, **kwargs):
-        """
-        Use labels for every column.
-        This ensures that fields won't contain duplicate names
-        """
-
-        args[0].use_labels = True
-        return super(BigQueryCompiler, self).visit_select(*args, **kwargs)
-
     def visit_column(
         self, column, add_to_result_map=None, include_table=True, **kwargs
     ):
diff --git a/setup.py b/setup.py
index e53f7995..eb0279d1 100644
--- a/setup.py
+++ b/setup.py
@@ -81,8 +81,9 @@ def readme():
     platforms="Posix; MacOS X; Windows",
     install_requires=[
         "sqlalchemy>=1.1.9,<1.4.0dev",
-        "google-auth>=1.2.0,<2.0dev",
+        "google-auth>=1.14.0,<2.0dev",  # Work around pip wack.
         "google-cloud-bigquery>=1.12.0",
+        "google-api-core>=1.19.1",  # Work-around bug in cloud core deps.
         "future",
     ],
     python_requires=">=3.6, <3.10",
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
index ab72cf88..34cbdb7a 100644
--- a/testing/constraints-3.6.txt
+++ b/testing/constraints-3.6.txt
@@ -5,5 +5,5 @@
 #
 # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
 sqlalchemy==1.1.9
-google-auth==1.2.0
+google-auth==1.14.0
 google-cloud-bigquery==1.12.0
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
new file mode 100644
index 00000000..22def748
--- /dev/null
+++ b/tests/unit/conftest.py
@@ -0,0 +1,16 @@
+import mock
+import pytest
+import sqlalchemy
+
+import fauxdbi
+
+
+@pytest.fixture()
+def faux_conn():
+    with mock.patch(
+        "google.cloud.bigquery.dbapi.connection.Connection", fauxdbi.Connection
+    ):
+        engine = sqlalchemy.create_engine("bigquery://myproject/mydataset")
+        conn = engine.connect()
+        yield conn
+        conn.close()
diff --git a/tests/unit/fauxdbi.py b/tests/unit/fauxdbi.py
new file mode 100644
index 00000000..44c4edae
--- /dev/null
+++ b/tests/unit/fauxdbi.py
@@ -0,0 +1,98 @@
+import google.api_core.exceptions
+import google.cloud.bigquery.schema
+import google.cloud.bigquery.table
+import contextlib
+import sqlite3
+
+
+class Connection:
+
+    connection = None
+
+    def __init__(self, client=None, bqstorage_client=None):
+        # share a single connection:
+        if self.connection is None:
+            self.__class__.connection = sqlite3.connect(":memory:")
+        self._client = FauxClient(client, self.connection)
+
+    def cursor(self):
+        return Cursor(self.connection)
+
+    def commit(self):
+        pass
+
+    def rollback(self):
+        pass
+
+    def close(self):
+        self.connection.close()
+
+
+class Cursor:
+
+    arraysize = 1
+
+    def __init__(self, connection):
+        self.connection = connection
+        self.cursor = connection.cursor()
+
+    def execute(self, operation, parameters=None):
+        if parameters:
+            parameters = {
+                name: "null" if value is None else repr(value)
+                for name, value in parameters.items()
+            }
+            operation %= parameters
+        self.cursor.execute(operation, parameters)
+        self.description = self.cursor.description
+        self.rowcount = self.cursor.rowcount
+
+    def executemany(self, operation, parameters_list):
+        for parameters in parameters_list:
+            self.execute(operation, parameters)
+
+    def close(self):
+        self.cursor.close()
+
+    def fetchone(self):
+        return self.cursor.fetchone()
+
+    def fetchmany(self, size=None):
+        return self.cursor.fetchmany(size or self.arraysize)
+
+    def fetchall(self):
+        return self.cursor.fetchall()
+
+    def setinputsizes(self, sizes):
+        pass
+
+    def setoutputsize(self, size, column=None):
+        pass
+
+
+class FauxClient:
+    def __init__(self, client, connection):
+        self._client = client
+        self.project = client.project
+        self.connection = connection
+
+    def get_table(self, table_ref):
+        table_name = table_ref.table_id
+        with contextlib.closing(self.connection.cursor()) as cursor:
+            cursor.execute(
+                f"select name from sqlite_master"
+                f" where type='table' and name='{table_name}'"
+            )
+            if list(cursor):
+                cursor.execute(f"PRAGMA table_info('{table_name}')")
+                schema = [
+                    google.cloud.bigquery.schema.SchemaField(
+                        name=name,
+                        field_type=type_,
+                        mode="REQUIRED" if notnull else "NULLABLE",
+                    )
+                    for cid, name, type_, notnull, dflt_value, pk in cursor
+                ]
+                return google.cloud.bigquery.table.Table(table_ref, schema)
+            else:
+                raise google.api_core.exceptions.NotFound(table_ref)
diff --git a/tests/unit/test_select.py b/tests/unit/test_select.py
new file mode 100644
index 00000000..f1c9cb09
--- /dev/null
+++ b/tests/unit/test_select.py
@@ -0,0 +1,11 @@
+import sqlalchemy
+
+
+def test_labels_not_forced(faux_conn):
+    metadata = sqlalchemy.MetaData()
+    table = sqlalchemy.Table(
+        "some_table", metadata, sqlalchemy.Column("id", sqlalchemy.Integer)
+    )
+    metadata.create_all(faux_conn.engine)
+    result = faux_conn.execute(sqlalchemy.select([table.c.id]))
+    assert result.keys() == ["id"]  # Look! Just the column name!
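
As a rough sketch of how the new faux_conn fixture could be exercised beyond test_select.py (not part of the patch above; the test name and the "nums" table are made up for illustration), plain-string SQL should round-trip through fauxdbi to the shared in-memory sqlite3 connection:

def test_raw_sql_round_trip(faux_conn):  # hypothetical follow-on test
    # Plain-string DDL/DML is handed to fauxdbi.Cursor unchanged and runs
    # against the in-memory sqlite3 database behind the faux connection.
    faux_conn.execute("create table nums (n INT64)")
    faux_conn.execute("insert into nums values (42)")
    assert faux_conn.execute("select n from nums").scalar() == 42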