diff --git a/.kokoro/dependencies.sh b/.kokoro/dependencies.sh index 9030ba8f99..6958bffe79 100755 --- a/.kokoro/dependencies.sh +++ b/.kokoro/dependencies.sh @@ -47,11 +47,11 @@ function completenessCheck() { # This is stripped from the output as it is not present in the flattened pom. # Only dependencies with 'compile' or 'runtime' scope are included from original dependency list. msg "Generating dependency list using original pom..." - mvn dependency:list -f pom.xml -DincludeScope=runtime -Dsort=true | grep '\[INFO] .*:.*:.*:.*:.*' | sed -e 's/ --.*//' >.org-list.txt + mvn dependency:list -f pom.xml -DincludeScope=runtime -DexcludeArtifactIds=gson,commons-codec,commons-logging,opencensus-contrib-http-util,httpclient,httpcore -Dsort=true | grep '\[INFO] .*:.*:.*:.*:.*' | sed -e 's/ --.*//' >.org-list.txt # Output dep list generated using the flattened pom (only 'compile' and 'runtime' scopes) msg "Generating dependency list using flattened pom..." - mvn dependency:list -f .flattened-pom.xml -DincludeScope=runtime -Dsort=true | grep '\[INFO] .*:.*:.*:.*:.*' >.new-list.txt + mvn dependency:list -f .flattened-pom.xml -DincludeScope=runtime -DexcludeArtifactIds=gson,commons-codec,commons-logging,opencensus-contrib-http-util,httpclient,httpcore -Dsort=true | grep '\[INFO] .*:.*:.*:.*:.*' >.new-list.txt # Compare two dependency lists msg "Comparing dependency lists..." diff --git a/google-cloud-bigquerystorage/clirr-ignored-differences.xml b/google-cloud-bigquerystorage/clirr-ignored-differences.xml new file mode 100644 index 0000000000..ddf344fda3 --- /dev/null +++ b/google-cloud-bigquerystorage/clirr-ignored-differences.xml @@ -0,0 +1,19 @@ +<?xml version="1.0" encoding="UTF-8"?> +<differences> +  <difference> +    <differenceType>8001</differenceType> +    <className>com/google/cloud/bigquery/storage/v1beta2/BQV2ToBQStorageConverter</className> +    <to>com.google.cloud.bigquery.storage.v1beta2.BQV2ToBQStorageConverter</to> +  </difference> +  <difference> +    <differenceType>8001</differenceType> +    <className>com/google/cloud/bigquery/storage/v1beta2/SchemaCompatibility</className> +    <to>com.google.cloud.bigquery.storage.v1beta2.SchemaCompatibility</to> +  </difference> +  <difference> +    <differenceType>7002</differenceType> +    <className>com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter</className> +    <method>com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter$Builder newBuilder(java.lang.String, com.google.cloud.bigquery.Schema)</method> +  </difference> +</differences> diff --git a/google-cloud-bigquerystorage/pom.xml b/google-cloud-bigquerystorage/pom.xml index c8aff3ce22..0988be9fb1 100644 --- a/google-cloud-bigquerystorage/pom.xml +++ b/google-cloud-bigquerystorage/pom.xml @@ -146,6 +146,7 @@ <dependency> <groupId>com.google.cloud</groupId> <artifactId>google-cloud-bigquery</artifactId> + <scope>test</scope> </dependency> <dependency> <groupId>com.google.code.findbugs</groupId> diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BQV2ToBQStorageConverter.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BQV2ToBQStorageConverter.java deleted file mode 100644 index 422c4d4dfe..0000000000 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BQV2ToBQStorageConverter.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
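Note on the clirr allowlist above: difference type 8001 is "class removed" and 7002 is "method removed", so the three entries acknowledge exactly the breaking changes this PR makes. A hedged sketch of the caller-visible impact follows; the table path is an example value and `tableSchema` is assumed to be supplied by the caller:

```java
import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
import com.google.cloud.bigquery.storage.v1beta2.TableSchema;

public class BreakingSurfaceSketch {
  // Hypothetical helper showing the surviving entry point after this PR.
  static JsonStreamWriter.Builder newWriterBuilder(TableSchema tableSchema) {
    // Gone after this PR (clirr 8001): BQV2ToBQStorageConverter, SchemaCompatibility.
    // Gone after this PR (clirr 7002):
    //   JsonStreamWriter.newBuilder(String, com.google.cloud.bigquery.Schema)
    // Still available: the TableSchema-based overload used below.
    return JsonStreamWriter.newBuilder(
        "projects/my-project/datasets/my_dataset/tables/my_table", tableSchema);
  }
}
```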
- */ -package com.google.cloud.bigquery.storage.v1beta2; - -import com.google.cloud.bigquery.Field; -import com.google.cloud.bigquery.Schema; -import com.google.cloud.bigquery.StandardSQLTypeName; -import com.google.common.collect.ImmutableMap; - -/** Converts structure from BigQuery v2 API to BigQueryStorage API */ -public class BQV2ToBQStorageConverter { - private static ImmutableMap<Field.Mode, TableFieldSchema.Mode> BQTableSchemaModeMap = - ImmutableMap.of( - Field.Mode.NULLABLE, TableFieldSchema.Mode.NULLABLE, - Field.Mode.REPEATED, TableFieldSchema.Mode.REPEATED, - Field.Mode.REQUIRED, TableFieldSchema.Mode.REQUIRED); - - private static ImmutableMap<StandardSQLTypeName, TableFieldSchema.Type> BQTableSchemaTypeMap = - new ImmutableMap.Builder<StandardSQLTypeName, TableFieldSchema.Type>() - .put(StandardSQLTypeName.BOOL, TableFieldSchema.Type.BOOL) - .put(StandardSQLTypeName.BYTES, TableFieldSchema.Type.BYTES) - .put(StandardSQLTypeName.DATE, TableFieldSchema.Type.DATE) - .put(StandardSQLTypeName.DATETIME, TableFieldSchema.Type.DATETIME) - .put(StandardSQLTypeName.FLOAT64, TableFieldSchema.Type.DOUBLE) - .put(StandardSQLTypeName.GEOGRAPHY, TableFieldSchema.Type.GEOGRAPHY) - .put(StandardSQLTypeName.INT64, TableFieldSchema.Type.INT64) - .put(StandardSQLTypeName.NUMERIC, TableFieldSchema.Type.NUMERIC) - .put(StandardSQLTypeName.STRING, TableFieldSchema.Type.STRING) - .put(StandardSQLTypeName.STRUCT, TableFieldSchema.Type.STRUCT) - .put(StandardSQLTypeName.TIME, TableFieldSchema.Type.TIME) - .put(StandardSQLTypeName.TIMESTAMP, TableFieldSchema.Type.TIMESTAMP) - .build(); - - /** - * Converts from bigquery v2 Table Schema to bigquery storage API Table Schema. - * - * @param schema the bigquery v2 Table Schema - * @return the bigquery storage API Table Schema - */ - public static TableSchema ConvertTableSchema(Schema schema) { - TableSchema.Builder result = TableSchema.newBuilder(); - for (int i = 0; i < schema.getFields().size(); i++) { - result.addFields(i, ConvertFieldSchema(schema.getFields().get(i))); - } - return result.build(); - } - - /** - * Converts from bigquery v2 Field Schema to bigquery storage API Field Schema.
- * - * @param field the bigquery v2 Field Schema - * @return the bigquery storage API Field Schema - */ - public static TableFieldSchema ConvertFieldSchema(Field field) { - TableFieldSchema.Builder result = TableFieldSchema.newBuilder(); - if (field.getMode() == null) { - field = field.toBuilder().setMode(Field.Mode.NULLABLE).build(); - } - result.setMode(BQTableSchemaModeMap.get(field.getMode())); - result.setName(field.getName()); - result.setType(BQTableSchemaTypeMap.get(field.getType().getStandardType())); - if (field.getDescription() != null) { - result.setDescription(field.getDescription()); - } - if (field.getSubFields() != null) { - for (int i = 0; i < field.getSubFields().size(); i++) { - result.addFields(i, ConvertFieldSchema(field.getSubFields().get(i))); - } - } - return result.build(); - } -} diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.java index ed5524a892..91b8216ffc 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.java @@ -19,7 +19,6 @@ import com.google.api.gax.batching.FlowControlSettings; import com.google.api.gax.core.CredentialsProvider; import com.google.api.gax.rpc.TransportChannelProvider; -import com.google.cloud.bigquery.Schema; import com.google.common.base.Preconditions; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.Descriptor; @@ -202,23 +201,6 @@ public static Builder newBuilder(String streamOrTableName, TableSchema tableSche return new Builder(streamOrTableName, tableSchema, null); } - - /** - * newBuilder that constructs a JsonStreamWriter builder with BigQuery client being initialized by - * StreamWriter by default. - * - * @param streamOrTableName name of the stream that must follow - * "projects/[^/]+/datasets/[^/]+/tables/[^/]+/streams/[^/]+" - * @param tableSchema The schema of the table when the stream was created, which is passed back - * through {@code WriteStream} - * @return Builder - */ - public static Builder newBuilder(String streamOrTableName, Schema tableSchema) { - Preconditions.checkNotNull(streamOrTableName, "StreamOrTableName is null."); - Preconditions.checkNotNull(tableSchema, "TableSchema is null."); - return new Builder( - streamOrTableName, BQV2ToBQStorageConverter.ConvertTableSchema(tableSchema), null); - } - /** * newBuilder that constructs a JsonStreamWriter builder. * diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/SchemaCompatibility.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/SchemaCompatibility.java deleted file mode 100644 index 238bbbcf34..0000000000 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/SchemaCompatibility.java +++ /dev/null @@ -1,543 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License.
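The hunk above deletes the `newBuilder(String, Schema)` overload, so `JsonStreamWriter` no longer accepts the BigQuery v2 `Schema` model. A minimal migration sketch, assuming a table with a single NULLABLE STRING column named "foo" and an example table path; `Builder.build()` declares checked exceptions, collapsed here into `Exception`:

```java
import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
import com.google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
import com.google.cloud.bigquery.storage.v1beta2.TableSchema;

public class JsonWriterMigration {
  public static JsonStreamWriter create() throws Exception {
    // Build the storage API's TableSchema directly instead of converting a
    // com.google.cloud.bigquery.Schema, as the deleted overload used to do.
    TableSchema tableSchema =
        TableSchema.newBuilder()
            .addFields(
                0,
                TableFieldSchema.newBuilder()
                    .setName("foo")
                    .setType(TableFieldSchema.Type.STRING)
                    .setMode(TableFieldSchema.Mode.NULLABLE)
                    .build())
            .build();
    // Previously: JsonStreamWriter.newBuilder(tableName, v2Schema); that
    // overload is removed in this PR.
    return JsonStreamWriter.newBuilder(
            "projects/my-project/datasets/my_dataset/tables/my_table", tableSchema)
        .build();
  }
}
```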
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.cloud.bigquery.storage.v1beta2; - -import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.Field; -import com.google.cloud.bigquery.LegacySQLTypeName; -import com.google.cloud.bigquery.Schema; -import com.google.cloud.bigquery.Table; -import com.google.cloud.bigquery.TableId; -import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.protobuf.Descriptors; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * A class that checks the schema compatibility between the proto schema in a proto descriptor and - * the BigQuery table schema. If this check passes, then the user can write to the BigQuery table using - * the user schema; otherwise the write will fail. - * - * <p>The implementation as of now is not complete, which means that even if this check passes, there - * is still a possibility that the write will fail. - */ -public class SchemaCompatibility { - private BigQuery bigquery; - private static SchemaCompatibility compat; - private static String tablePatternString = "projects/([^/]+)/datasets/([^/]+)/tables/([^/]+)"; - private static Pattern tablePattern = Pattern.compile(tablePatternString); - private static final int NestingLimit = 15; - // private static Set SupportedTypesHashSet = - - private static Set<Descriptors.FieldDescriptor.Type> SupportedTypes = - Collections.unmodifiableSet( - new HashSet<>( - Arrays.asList( - Descriptors.FieldDescriptor.Type.INT32, - Descriptors.FieldDescriptor.Type.INT64, - Descriptors.FieldDescriptor.Type.UINT32, - Descriptors.FieldDescriptor.Type.UINT64, - Descriptors.FieldDescriptor.Type.FIXED32, - Descriptors.FieldDescriptor.Type.FIXED64, - Descriptors.FieldDescriptor.Type.SFIXED32, - Descriptors.FieldDescriptor.Type.SFIXED64, - Descriptors.FieldDescriptor.Type.FLOAT, - Descriptors.FieldDescriptor.Type.DOUBLE, - Descriptors.FieldDescriptor.Type.BOOL, - Descriptors.FieldDescriptor.Type.BYTES, - Descriptors.FieldDescriptor.Type.STRING, - Descriptors.FieldDescriptor.Type.MESSAGE, - Descriptors.FieldDescriptor.Type.GROUP, - Descriptors.FieldDescriptor.Type.ENUM))); - - private SchemaCompatibility(BigQuery bigquery) { - // TODO: Add functionality that allows SchemaCompatibility to build schemas. - this.bigquery = bigquery; - } - - /** - * Gets a singleton {@code SchemaCompatibility} object. - * - * @return the singleton instance - */ - public static SchemaCompatibility getInstance() { - if (compat == null) { - RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); - compat = new SchemaCompatibility(bigqueryHelper.getOptions().getService()); - } - return compat; - } - - /** - * Gets a {@code SchemaCompatibility} object with custom BigQuery stub.
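For reference, the singleton shown above was the entry point of the client-side precheck this PR deletes. A sketch of the call pattern that existing users now have to remove (it compiles only against releases before this PR; the table path is an example value, and any generated protobuf message class can supply the descriptor):

```java
import com.google.cloud.bigquery.storage.v1beta2.SchemaCompatibility;
import com.google.protobuf.Descriptors;

public class PrecheckExample {
  static void precheck(Descriptors.Descriptor descriptor) {
    SchemaCompatibility compat = SchemaCompatibility.getInstance();
    // Throws IllegalArgumentException on mode/type mismatches; the table name
    // must match "projects/([^/]+)/datasets/([^/]+)/tables/([^/]+)".
    compat.check(
        "projects/my-project/datasets/my_dataset/tables/my_table", descriptor, false);
  }
}
```

After this PR there is no client-side precheck; schema mismatches surface as errors when the write is actually attempted.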
- * - * @param bigquery - * @return - */ - @VisibleForTesting - public static SchemaCompatibility getInstance(BigQuery bigquery) { - Preconditions.checkNotNull(bigquery, "BigQuery is null."); - return new SchemaCompatibility(bigquery); - } - - private TableId getTableId(String tableName) { - Matcher matcher = tablePattern.matcher(tableName); - if (!matcher.matches() || matcher.groupCount() != 3) { - throw new IllegalArgumentException("Invalid table name: " + tableName); - } - return TableId.of(matcher.group(1), matcher.group(2), matcher.group(3)); - } - - /** - * @param field - * @return True if fieldtype is supported by BQ Schema - */ - public static boolean isSupportedType(Descriptors.FieldDescriptor field) { - Preconditions.checkNotNull(field, "Field is null."); - Descriptors.FieldDescriptor.Type fieldType = field.getType(); - if (!SupportedTypes.contains(fieldType)) { - return false; - } - return true; - } - - private static boolean isCompatibleWithBQBool(Descriptors.FieldDescriptor.Type field) { - if (field == Descriptors.FieldDescriptor.Type.BOOL - || field == Descriptors.FieldDescriptor.Type.INT32 - || field == Descriptors.FieldDescriptor.Type.INT64 - || field == Descriptors.FieldDescriptor.Type.UINT32 - || field == Descriptors.FieldDescriptor.Type.UINT64 - || field == Descriptors.FieldDescriptor.Type.FIXED32 - || field == Descriptors.FieldDescriptor.Type.FIXED64 - || field == Descriptors.FieldDescriptor.Type.SFIXED32 - || field == Descriptors.FieldDescriptor.Type.SFIXED64) { - return true; - } - return false; - } - - private static boolean isCompatibleWithBQBytes(Descriptors.FieldDescriptor.Type field) { - if (field == Descriptors.FieldDescriptor.Type.BYTES) { - return true; - } - return false; - } - - private static boolean isCompatibleWithBQDate(Descriptors.FieldDescriptor.Type field) { - if (field == Descriptors.FieldDescriptor.Type.INT32 - || field == Descriptors.FieldDescriptor.Type.INT64 - || field == Descriptors.FieldDescriptor.Type.SFIXED32 - || field == Descriptors.FieldDescriptor.Type.SFIXED64) { - - return true; - } - return false; - } - - private static boolean isCompatibleWithBQDatetime(Descriptors.FieldDescriptor.Type field) { - if (field == Descriptors.FieldDescriptor.Type.STRING - || field == Descriptors.FieldDescriptor.Type.INT64) { - return true; - } - return false; - } - - private static boolean isCompatibleWithBQFloat(Descriptors.FieldDescriptor.Type field) { - if (field == Descriptors.FieldDescriptor.Type.FLOAT) { - return true; - } - if (field == Descriptors.FieldDescriptor.Type.DOUBLE) { - return true; - } - return false; - } - - private static boolean isCompatibleWithBQGeography(Descriptors.FieldDescriptor.Type field) { - if (field == Descriptors.FieldDescriptor.Type.STRING) { - return true; - } - return false; - } - - private static boolean isCompatibleWithBQInteger(Descriptors.FieldDescriptor.Type field) { - if (field == Descriptors.FieldDescriptor.Type.INT64 - || field == Descriptors.FieldDescriptor.Type.SFIXED64 - || field == Descriptors.FieldDescriptor.Type.INT32 - || field == Descriptors.FieldDescriptor.Type.UINT32 - || field == Descriptors.FieldDescriptor.Type.FIXED32 - || field == Descriptors.FieldDescriptor.Type.SFIXED32 - || field == Descriptors.FieldDescriptor.Type.ENUM) { - return true; - } - return false; - } - - private static boolean isCompatibleWithBQNumeric(Descriptors.FieldDescriptor.Type field) { - if (field == Descriptors.FieldDescriptor.Type.INT32 - || field == Descriptors.FieldDescriptor.Type.INT64 - || field == 
Descriptors.FieldDescriptor.Type.UINT32 - || field == Descriptors.FieldDescriptor.Type.UINT64 - || field == Descriptors.FieldDescriptor.Type.FIXED32 - || field == Descriptors.FieldDescriptor.Type.FIXED64 - || field == Descriptors.FieldDescriptor.Type.SFIXED32 - || field == Descriptors.FieldDescriptor.Type.SFIXED64 - || field == Descriptors.FieldDescriptor.Type.STRING - || field == Descriptors.FieldDescriptor.Type.BYTES - || field == Descriptors.FieldDescriptor.Type.FLOAT - || field == Descriptors.FieldDescriptor.Type.DOUBLE) { - return true; - } - - return false; - } - - private static boolean isCompatibleWithBQRecord(Descriptors.FieldDescriptor.Type field) { - if (field == Descriptors.FieldDescriptor.Type.MESSAGE - || field == Descriptors.FieldDescriptor.Type.GROUP) { - return true; - } - return false; - } - - private static boolean isCompatibleWithBQString(Descriptors.FieldDescriptor.Type field) { - if (field == Descriptors.FieldDescriptor.Type.STRING - || field == Descriptors.FieldDescriptor.Type.ENUM) { - return true; - } - return false; - } - - private static boolean isCompatibleWithBQTime(Descriptors.FieldDescriptor.Type field) { - if (field == Descriptors.FieldDescriptor.Type.INT64 - || field == Descriptors.FieldDescriptor.Type.STRING) { - - return true; - } - return false; - } - - private static boolean isCompatibleWithBQTimestamp(Descriptors.FieldDescriptor.Type field) { - if (isCompatibleWithBQInteger(field)) { - return true; - } - return false; - } - - /** - * Checks if proto field option is compatible with BQ field mode. - * - * @param protoField - * @param BQField - * @param protoScope Debugging purposes to show error if messages are nested. - * @param BQScope Debugging purposes to show error if messages are nested. - * @throws IllegalArgumentException if proto field type is incompatible with BQ field type. - */ - private void protoFieldModeIsCompatibleWithBQFieldMode( - Descriptors.FieldDescriptor protoField, Field BQField, String protoScope, String BQScope) - throws IllegalArgumentException { - if (BQField.getMode() == null) { - throw new IllegalArgumentException( - "Big query schema contains invalid field option for " + BQScope + "."); - } - switch (BQField.getMode()) { - case REPEATED: - if (!protoField.isRepeated()) { - throw new IllegalArgumentException( - "Given proto field " - + protoScope - + " is not repeated but Big Query field " - + BQScope - + " is."); - } - break; - case REQUIRED: - if (!protoField.isRequired()) { - throw new IllegalArgumentException( - "Given proto field " - + protoScope - + " is not required but Big Query field " - + BQScope - + " is."); - } - break; - case NULLABLE: - if (protoField.isRepeated()) { - throw new IllegalArgumentException( - "Given proto field " - + protoScope - + " is repeated but Big Query field " - + BQScope - + " is optional."); - } - break; - } - } - /** - * Checks if proto field type is compatible with BQ field type. - * - * @param protoField - * @param BQField - * @param allowUnknownFields - * @param protoScope Debugging purposes to show error if messages are nested. - * @param BQScope Debugging purposes to show error if messages are nested. - * @param allMessageTypes Keeps track of all current protos to avoid recursively nested protos. - * @param rootProtoName Debugging purposes for nested level > 15. - * @throws IllegalArgumentException if proto field type is incompatible with BQ field type. 
- */ - private void protoFieldTypeIsCompatibleWithBQFieldType( - Descriptors.FieldDescriptor protoField, - Field BQField, - boolean allowUnknownFields, - String protoScope, - String BQScope, - HashSet allMessageTypes, - String rootProtoName) - throws IllegalArgumentException { - - LegacySQLTypeName BQType = BQField.getType(); - Descriptors.FieldDescriptor.Type protoType = protoField.getType(); - boolean match = false; - switch (BQType.toString()) { - case "BOOLEAN": - match = isCompatibleWithBQBool(protoType); - break; - case "BYTES": - match = isCompatibleWithBQBytes(protoType); - break; - case "DATE": - match = isCompatibleWithBQDate(protoType); - break; - case "DATETIME": - match = isCompatibleWithBQDatetime(protoType); - break; - case "FLOAT": - match = isCompatibleWithBQFloat(protoType); - break; - case "GEOGRAPHY": - match = isCompatibleWithBQGeography(protoType); - break; - case "INTEGER": - match = isCompatibleWithBQInteger(protoType); - break; - case "NUMERIC": - match = isCompatibleWithBQNumeric(protoType); - break; - case "RECORD": - if (allMessageTypes.size() > NestingLimit) { - throw new IllegalArgumentException( - "Proto schema " - + rootProtoName - + " is not supported: contains nested messages of more than 15 levels."); - } - match = isCompatibleWithBQRecord(protoType); - if (!match) { - break; - } - Descriptors.Descriptor message = protoField.getMessageType(); - if (allMessageTypes.contains(message)) { - throw new IllegalArgumentException( - "Proto schema " + protoScope + " is not supported: is a recursively nested message."); - } - allMessageTypes.add(message); - isProtoCompatibleWithBQ( - protoField.getMessageType(), - Schema.of(BQField.getSubFields()), - allowUnknownFields, - protoScope, - BQScope, - false, - allMessageTypes, - rootProtoName); - allMessageTypes.remove(message); - break; - case "STRING": - match = isCompatibleWithBQString(protoType); - break; - case "TIME": - match = isCompatibleWithBQTime(protoType); - break; - case "TIMESTAMP": - match = isCompatibleWithBQTimestamp(protoType); - break; - } - if (!match) { - throw new IllegalArgumentException( - "The proto field " - + protoScope - + " does not have a matching type with the big query field " - + BQScope - + "."); - } - } - - /** - * Checks if proto schema is compatible with BQ schema. - * - * @param protoSchema - * @param BQSchema - * @param allowUnknownFields - * @param protoScope Debugging purposes to show error if messages are nested. - * @param BQScope Debugging purposes to show error if messages are nested. - * @param topLevel True if this is the root level of proto (in terms of nested messages) - * @param allMessageTypes Keeps track of all current protos to avoid recursively nested protos. - * @param rootProtoName Debugging purposes for nested level > 15. - * @throws IllegalArgumentException if proto field type is incompatible with BQ field type. 
- */ - private void isProtoCompatibleWithBQ( - Descriptors.Descriptor protoSchema, - Schema BQSchema, - boolean allowUnknownFields, - String protoScope, - String BQScope, - boolean topLevel, - HashSet allMessageTypes, - String rootProtoName) - throws IllegalArgumentException { - - int matchedFields = 0; - HashMap protoFieldMap = new HashMap<>(); - List protoFields = protoSchema.getFields(); - List BQFields = BQSchema.getFields(); - - if (protoFields.size() > BQFields.size()) { - if (!allowUnknownFields) { - throw new IllegalArgumentException( - "Proto schema " - + protoScope - + " has " - + protoFields.size() - + " fields, while BQ schema " - + BQScope - + " has " - + BQFields.size() - + " fields."); - } - } - // Use hashmap to map from lowercased name to appropriate field to account for casing difference - for (Descriptors.FieldDescriptor field : protoFields) { - protoFieldMap.put(field.getName().toLowerCase(), field); - } - - for (Field BQField : BQFields) { - String fieldName = BQField.getName().toLowerCase(); - Descriptors.FieldDescriptor protoField = null; - if (protoFieldMap.containsKey(fieldName)) { - protoField = protoFieldMap.get(fieldName); - } - - String currentBQScope = BQScope + "." + BQField.getName(); - if (protoField == null && BQField.getMode() == Field.Mode.REQUIRED) { - throw new IllegalArgumentException( - "The required Big Query field " - + currentBQScope - + " is missing in the proto schema " - + protoScope - + "."); - } - if (protoField == null) { - continue; - } - String currentProtoScope = protoScope + "." + protoField.getName(); - if (!isSupportedType(protoField)) { - throw new IllegalArgumentException( - "Proto schema " - + currentProtoScope - + " is not supported: contains " - + protoField.getType() - + " field type."); - } - if (protoField.isMapField()) { - throw new IllegalArgumentException( - "Proto schema " + currentProtoScope + " is not supported: is a map field."); - } - protoFieldModeIsCompatibleWithBQFieldMode( - protoField, BQField, currentProtoScope, currentBQScope); - protoFieldTypeIsCompatibleWithBQFieldType( - protoField, - BQField, - allowUnknownFields, - currentProtoScope, - currentBQScope, - allMessageTypes, - rootProtoName); - matchedFields++; - } - - if (matchedFields == 0 && topLevel) { - throw new IllegalArgumentException( - "There is no matching fields found for the proto schema " - + protoScope - + " and the BQ table schema " - + BQScope - + "."); - } - } - - /** - * Checks if proto schema is compatible with BQ schema after retrieving BQ schema by BQTableName. - * - * @param BQTableName Must include project_id, dataset_id, and table_id in the form that matches - * the regex "projects/([^/]+)/datasets/([^/]+)/tables/([^/]+)" - * @param protoSchema - * @param allowUnknownFields Flag indicating proto can have unknown fields. - * @throws IllegalArgumentException if proto field type is incompatible with BQ field type. 
- */ - public void check( - String BQTableName, Descriptors.Descriptor protoSchema, boolean allowUnknownFields) - throws IllegalArgumentException { - Preconditions.checkNotNull(BQTableName, "TableName is null."); - Preconditions.checkNotNull(protoSchema, "Protobuf descriptor is null."); - - TableId tableId = getTableId(BQTableName); - Table table = bigquery.getTable(tableId); - Schema BQSchema = table.getDefinition().getSchema(); - String protoSchemaName = protoSchema.getName(); - HashSet allMessageTypes = new HashSet<>(); - allMessageTypes.add(protoSchema); - isProtoCompatibleWithBQ( - protoSchema, - BQSchema, - allowUnknownFields, - protoSchemaName, - tableId.getTable(), - true, - allMessageTypes, - protoSchemaName); - } - - /** - * Checks if proto schema is compatible with BQ schema after retrieving BQ schema by BQTableName. - * Assumes allowUnknownFields is false. - * - * @param BQTableName Must include project_id, dataset_id, and table_id in the form that matches - * the regex "projects/([^/]+)/datasets/([^/]+)/tables/([^/]+)" - * @param protoSchema - * @throws IllegalArgumentException if proto field type is incompatible with BQ field type. - */ - public void check(String BQTableName, Descriptors.Descriptor protoSchema) - throws IllegalArgumentException { - - check(BQTableName, protoSchema, false); - } -} diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BQV2ToBQStorageConverterTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BQV2ToBQStorageConverterTest.java deleted file mode 100644 index 2cf93caad6..0000000000 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BQV2ToBQStorageConverterTest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
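Callers that depended on the deleted `BQV2ToBQStorageConverter` (its unit test is removed below) and still hold a v2 `Schema` can inline the same mapping on their side. The sketch below mirrors the deleted logic, with the methods renamed to conventional camelCase; note that keeping it reintroduces a runtime dependency on `google-cloud-bigquery`, which this PR demotes to test scope in this module:

```java
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
import com.google.cloud.bigquery.storage.v1beta2.TableSchema;
import com.google.common.collect.ImmutableMap;

/** Application-side copy of the deleted conversion logic (hedged sketch). */
public class V2SchemaConverter {
  private static final ImmutableMap<Field.Mode, TableFieldSchema.Mode> MODE_MAP =
      ImmutableMap.of(
          Field.Mode.NULLABLE, TableFieldSchema.Mode.NULLABLE,
          Field.Mode.REPEATED, TableFieldSchema.Mode.REPEATED,
          Field.Mode.REQUIRED, TableFieldSchema.Mode.REQUIRED);

  private static final ImmutableMap<StandardSQLTypeName, TableFieldSchema.Type> TYPE_MAP =
      new ImmutableMap.Builder<StandardSQLTypeName, TableFieldSchema.Type>()
          .put(StandardSQLTypeName.BOOL, TableFieldSchema.Type.BOOL)
          .put(StandardSQLTypeName.BYTES, TableFieldSchema.Type.BYTES)
          .put(StandardSQLTypeName.DATE, TableFieldSchema.Type.DATE)
          .put(StandardSQLTypeName.DATETIME, TableFieldSchema.Type.DATETIME)
          .put(StandardSQLTypeName.FLOAT64, TableFieldSchema.Type.DOUBLE)
          .put(StandardSQLTypeName.GEOGRAPHY, TableFieldSchema.Type.GEOGRAPHY)
          .put(StandardSQLTypeName.INT64, TableFieldSchema.Type.INT64)
          .put(StandardSQLTypeName.NUMERIC, TableFieldSchema.Type.NUMERIC)
          .put(StandardSQLTypeName.STRING, TableFieldSchema.Type.STRING)
          .put(StandardSQLTypeName.STRUCT, TableFieldSchema.Type.STRUCT)
          .put(StandardSQLTypeName.TIME, TableFieldSchema.Type.TIME)
          .put(StandardSQLTypeName.TIMESTAMP, TableFieldSchema.Type.TIMESTAMP)
          .build();

  /** Converts a BigQuery v2 Schema to a storage API TableSchema. */
  public static TableSchema convertTableSchema(Schema schema) {
    TableSchema.Builder result = TableSchema.newBuilder();
    for (int i = 0; i < schema.getFields().size(); i++) {
      result.addFields(i, convertFieldSchema(schema.getFields().get(i)));
    }
    return result.build();
  }

  /** Converts a single v2 Field, defaulting a null mode to NULLABLE. */
  public static TableFieldSchema convertFieldSchema(Field field) {
    if (field.getMode() == null) {
      field = field.toBuilder().setMode(Field.Mode.NULLABLE).build();
    }
    TableFieldSchema.Builder result =
        TableFieldSchema.newBuilder()
            .setMode(MODE_MAP.get(field.getMode()))
            .setName(field.getName())
            .setType(TYPE_MAP.get(field.getType().getStandardType()));
    if (field.getDescription() != null) {
      result.setDescription(field.getDescription());
    }
    if (field.getSubFields() != null) {
      for (int i = 0; i < field.getSubFields().size(); i++) {
        result.addFields(i, convertFieldSchema(field.getSubFields().get(i)));
      }
    }
    return result.build();
  }
}
```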
- */ -package com.google.cloud.bigquery.storage.v1beta2; - -import static org.junit.Assert.*; - -import com.google.cloud.bigquery.Field; -import com.google.cloud.bigquery.Schema; -import com.google.cloud.bigquery.StandardSQLTypeName; -import com.google.cloud.bigquery.storage.test.JsonTest.*; -import com.google.cloud.bigquery.storage.test.SchemaTest.*; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - -@RunWith(JUnit4.class) -public class BQV2ToBQStorageConverterTest { - @Test - public void testStructSimple() throws Exception { - final TableFieldSchema StringType = - TableFieldSchema.newBuilder() - .setType(TableFieldSchema.Type.TIME) - .setMode(TableFieldSchema.Mode.NULLABLE) - .setName("test_field_time") - .build(); - final TableFieldSchema tableFieldSchema = - TableFieldSchema.newBuilder() - .setType(TableFieldSchema.Type.STRUCT) - .setMode(TableFieldSchema.Mode.NULLABLE) - .setName("test_field_type") - .addFields(0, StringType) - .build(); - final TableSchema expectedSchema = - TableSchema.newBuilder().addFields(0, tableFieldSchema).build(); - - Schema v2Schema = - Schema.of( - Field.newBuilder( - "test_field_type", - StandardSQLTypeName.STRUCT, - Field.newBuilder("test_field_time", StandardSQLTypeName.TIME) - .setMode(Field.Mode.NULLABLE) - .build()) - .setMode(Field.Mode.NULLABLE) - .build()); - final TableSchema convertedSchema = BQV2ToBQStorageConverter.ConvertTableSchema(v2Schema); - assertEquals(expectedSchema.toString(), convertedSchema.toString()); - } -} diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriterTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriterTest.java index 4bd9ea91dc..dbc7460163 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriterTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriterTest.java @@ -24,9 +24,6 @@ import com.google.api.gax.grpc.testing.LocalChannelProvider; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.api.gax.grpc.testing.MockServiceHelper; -import com.google.cloud.bigquery.Field; -import com.google.cloud.bigquery.Schema; -import com.google.cloud.bigquery.StandardSQLTypeName; import com.google.cloud.bigquery.storage.test.JsonTest.ComplexRoot; import com.google.cloud.bigquery.storage.test.Test.FooType; import com.google.protobuf.ByteString; @@ -528,13 +525,10 @@ public void testAppendOutOfRangeException() throws Exception { @Test public void testCreateDefaultStream() throws Exception { - Schema v2Schema = - Schema.of( - Field.newBuilder("foo", StandardSQLTypeName.STRING) - .setMode(Field.Mode.NULLABLE) - .build()); + TableSchema tableSchema = + TableSchema.newBuilder().addFields(0, TEST_INT).addFields(1, TEST_STRING).build(); try (JsonStreamWriter writer = - JsonStreamWriter.newBuilder(TEST_TABLE, v2Schema) + JsonStreamWriter.newBuilder(TEST_TABLE, tableSchema) .setChannelProvider(channelProvider) .setCredentialsProvider(NoCredentialsProvider.create()) .build()) { diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/SchemaCompatibilityTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/SchemaCompatibilityTest.java deleted file mode 100644 index ee8761aea3..0000000000 --- 
a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/SchemaCompatibilityTest.java +++ /dev/null @@ -1,1015 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.cloud.bigquery.storage.v1beta2; - -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; - -import com.google.cloud.bigquery.*; -import com.google.cloud.bigquery.Field; -import com.google.cloud.bigquery.LegacySQLTypeName; -import com.google.cloud.bigquery.Schema; -import com.google.cloud.bigquery.Table; -import com.google.cloud.bigquery.storage.test.SchemaTest.*; -import com.google.cloud.bigquery.storage.test.Test.FooType; -import com.google.protobuf.Descriptors; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import javax.annotation.Nullable; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -@RunWith(JUnit4.class) -public class SchemaCompatibilityTest { - @Mock private BigQuery mockBigquery; - @Mock private Table mockBigqueryTable; - Descriptors.Descriptor[] type_descriptors = { - Int32Type.getDescriptor(), - Int64Type.getDescriptor(), - UInt32Type.getDescriptor(), - UInt64Type.getDescriptor(), - Fixed32Type.getDescriptor(), - Fixed64Type.getDescriptor(), - SFixed32Type.getDescriptor(), - SFixed64Type.getDescriptor(), - FloatType.getDescriptor(), - DoubleType.getDescriptor(), - BoolType.getDescriptor(), - BytesType.getDescriptor(), - StringType.getDescriptor(), - EnumType.getDescriptor(), - MessageType.getDescriptor(), - GroupType.getDescriptor() - }; - - @Before - public void setUp() throws IOException { - MockitoAnnotations.initMocks(this); - when(mockBigquery.getTable(any(TableId.class))).thenReturn(mockBigqueryTable); - } - - @After - public void tearDown() { - verifyNoMoreInteractions(mockBigquery); - verifyNoMoreInteractions(mockBigqueryTable); - } - - public void customizeSchema(final Schema schema) { - TableDefinition definition = - new TableDefinition() { - @Override - public Type getType() { - return null; - } - - @Nullable - @Override - public Schema getSchema() { - return schema; - } - - @Override - public Builder toBuilder() { - return null; - } - }; - when(mockBigqueryTable.getDefinition()).thenReturn(definition); - } - - @Test - public void testSuccess() throws Exception { - customizeSchema( - Schema.of( - Field.newBuilder("Foo", LegacySQLTypeName.STRING) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - compact.check("projects/p/datasets/d/tables/t", FooType.getDescriptor(), false); - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } - - @Test - public void testBadTableName() throws Exception { - try { - SchemaCompatibility compact = 
SchemaCompatibility.getInstance(mockBigquery); - compact.check("blah", FooType.getDescriptor(), false); - fail("should fail"); - } catch (IllegalArgumentException expected) { - assertEquals("Invalid table name: blah", expected.getMessage()); - } - } - - @Test - public void testSupportedTypes() { - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - for (Descriptors.FieldDescriptor field : SupportedTypes.getDescriptor().getFields()) { - assertTrue(compact.isSupportedType(field)); - } - - for (Descriptors.FieldDescriptor field : NonSupportedTypes.getDescriptor().getFields()) { - assertFalse(compact.isSupportedType(field)); - } - } - - @Test - public void testMap() { - customizeSchema( - Schema.of( - Field.newBuilder("map_value", LegacySQLTypeName.INTEGER) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - Descriptors.Descriptor testMap = NonSupportedMap.getDescriptor(); - String protoName = testMap.getName() + ".map_value"; - try { - compact.check("projects/p/datasets/d/tables/t", testMap, false); - fail("Should not be supported: field contains map"); - } catch (IllegalArgumentException expected) { - assertEquals( - "Proto schema " + protoName + " is not supported: is a map field.", - expected.getMessage()); - } - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } - - @Test - public void testNestingSupportedSimple() { - Field BQSupportedNestingLvl2 = - Field.newBuilder("int_value", LegacySQLTypeName.INTEGER) - .setMode(Field.Mode.NULLABLE) - .build(); - customizeSchema( - Schema.of( - Field.newBuilder("int_value", LegacySQLTypeName.INTEGER) - .setMode(Field.Mode.NULLABLE) - .build(), - Field.newBuilder("nesting_value", LegacySQLTypeName.RECORD, BQSupportedNestingLvl2) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - Descriptors.Descriptor testNesting = SupportedNestingLvl1.getDescriptor(); - try { - compact.check("projects/p/datasets/d/tables/t", testNesting, false); - } catch (Exception e) { - fail(e.getMessage()); - } - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } - - @Test - public void testNestingSupportedStacked() { - Field BQSupportedNestingLvl2 = - Field.newBuilder("int_value", LegacySQLTypeName.INTEGER) - .setMode(Field.Mode.NULLABLE) - .build(); - customizeSchema( - Schema.of( - Field.newBuilder("int_value", LegacySQLTypeName.INTEGER) - .setMode(Field.Mode.NULLABLE) - .build(), - Field.newBuilder("nesting_value1", LegacySQLTypeName.RECORD, BQSupportedNestingLvl2) - .setMode(Field.Mode.NULLABLE) - .build(), - Field.newBuilder("nesting_value2", LegacySQLTypeName.RECORD, BQSupportedNestingLvl2) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - Descriptors.Descriptor testNesting = SupportedNestingStacked.getDescriptor(); - try { - compact.check("projects/p/datasets/d/tables/t", testNesting, false); - } catch (Exception e) { - fail(e.getMessage()); - } - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } - - /* - * This is not the "exact" test, as BigQuery fields cannot be recursive. 
Instead, this test uses - * two DIFFERENT records with the same name to simulate recursive protos (protos can't have the - * same name anyways unless they are the same proto). - */ - @Test - public void testNestingContainsRecursive() { - Field BQNonSupportedNestingRecursive = - Field.newBuilder( - "nesting_value", - LegacySQLTypeName.RECORD, - Field.newBuilder("int_value", LegacySQLTypeName.INTEGER) - .setMode(Field.Mode.NULLABLE) - .build()) - .setMode(Field.Mode.NULLABLE) - .build(); - - customizeSchema( - Schema.of( - Field.newBuilder("int_value", LegacySQLTypeName.INTEGER) - .setMode(Field.Mode.NULLABLE) - .build(), - Field.newBuilder( - "nesting_value", LegacySQLTypeName.RECORD, BQNonSupportedNestingRecursive) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - Descriptors.Descriptor testNesting = NonSupportedNestingContainsRecursive.getDescriptor(); - try { - compact.check("projects/p/datasets/d/tables/t", testNesting, false); - fail("Should not be supported: contains nested messages of more than 15 levels."); - } catch (IllegalArgumentException expected) { - assertEquals( - "Proto schema " - + testNesting.getName() - + ".nesting_value.nesting_value is not supported: is a recursively nested message.", - expected.getMessage()); - } - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } - - @Test - public void testNestingRecursiveLimit() { - Field NonSupportedNestingLvl16 = - Field.newBuilder("test1", LegacySQLTypeName.INTEGER).setMode(Field.Mode.NULLABLE).build(); - Field NonSupportedNestingLvl15 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl16) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl14 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl15) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl13 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl14) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl12 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl13) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl11 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl12) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl10 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl11) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl9 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl10) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl8 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl9) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl7 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl8) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl6 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl7) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl5 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl6) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl4 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl5) - .setMode(Field.Mode.NULLABLE) - .build(); - Field 
NonSupportedNestingLvl3 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl4) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl2 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl3) - .setMode(Field.Mode.NULLABLE) - .build(); - Field NonSupportedNestingLvl1 = - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl2) - .setMode(Field.Mode.NULLABLE) - .build(); - customizeSchema( - Schema.of( - Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl1) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - Descriptors.Descriptor testNesting = NonSupportedNestingLvl0.getDescriptor(); - try { - compact.check("projects/p/datasets/d/tables/t", testNesting, false); - fail("Should not be supported: contains nested messages of more than 15 levels."); - } catch (IllegalArgumentException expected) { - assertEquals( - "Proto schema " - + testNesting.getName() - + " is not supported: contains nested messages of more than 15 levels.", - expected.getMessage()); - } - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } - - @Test - public void testProtoMoreFields() { - Schema customSchema = Schema.of(Field.of("int32_value", LegacySQLTypeName.INTEGER)); - customizeSchema(customSchema); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - - try { - compact.check("projects/p/datasets/d/tables/t", SupportedTypes.getDescriptor(), false); - fail("Should fail: proto has more fields and allowUnknownFields flag is false."); - } catch (IllegalArgumentException expected) { - assertEquals( - "Proto schema " - + SupportedTypes.getDescriptor().getName() - + " has " - + SupportedTypes.getDescriptor().getFields().size() - + " fields, while BQ schema t has " - + 1 - + " fields.", - expected.getMessage()); - } - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } - - @Test - public void testBQRepeated() { - customizeSchema( - Schema.of( - Field.newBuilder("repeated_mode", LegacySQLTypeName.INTEGER) - .setMode(Field.Mode.REPEATED) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - compact.check("projects/p/datasets/d/tables/t", ProtoRepeatedBQRepeated.getDescriptor(), false); - try { - compact.check( - "projects/p/datasets/d/tables/t", ProtoOptionalBQRepeated.getDescriptor(), false); - fail("Should fail: BQ schema is repeated, but proto is optional."); - } catch (IllegalArgumentException expected) { - assertEquals( - "Given proto field " - + ProtoOptionalBQRepeated.getDescriptor().getName() - + ".repeated_mode" - + " is not repeated but Big Query field t.repeated_mode is.", - expected.getMessage()); - } - - try { - compact.check( - "projects/p/datasets/d/tables/t", ProtoRequiredBQRepeated.getDescriptor(), false); - fail("Should fail: BQ schema is repeated, but proto is required."); - } catch (IllegalArgumentException expected) { - assertEquals( - "Given proto field " - + ProtoRequiredBQRepeated.getDescriptor().getName() - + ".repeated_mode" - + " is not repeated but Big Query field t.repeated_mode is.", - expected.getMessage()); - } - verify(mockBigquery, times(3)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(3)).getDefinition(); - } - - @Test - public void testBQRequired() { - customizeSchema( - Schema.of( - 
Field.newBuilder("required_mode", LegacySQLTypeName.INTEGER) - .setMode(Field.Mode.REQUIRED) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - compact.check("projects/p/datasets/d/tables/t", ProtoRequiredBQRequired.getDescriptor(), false); - - try { - compact.check("projects/p/datasets/d/tables/t", ProtoNoneBQRequired.getDescriptor(), false); - fail("Should fail: BQ schema is required, but proto does not have this field."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The required Big Query field t.required_mode is missing in the proto schema " - + ProtoNoneBQRequired.getDescriptor().getName() - + ".", - expected.getMessage()); - } - - try { - compact.check( - "projects/p/datasets/d/tables/t", ProtoOptionalBQRequired.getDescriptor(), false); - fail("Should fail: BQ schema is required, but proto is optional."); - } catch (IllegalArgumentException expected) { - assertEquals( - "Given proto field " - + ProtoOptionalBQRequired.getDescriptor().getName() - + ".required_mode is not required but Big Query field t.required_mode is.", - expected.getMessage()); - } - - try { - compact.check( - "projects/p/datasets/d/tables/t", ProtoRepeatedBQRequired.getDescriptor(), false); - fail("Should fail: BQ schema is required, but proto is repeated."); - } catch (IllegalArgumentException expected) { - assertEquals( - "Given proto field " - + ProtoRepeatedBQRequired.getDescriptor().getName() - + ".required_mode is not required but Big Query field t.required_mode is.", - expected.getMessage()); - } - verify(mockBigquery, times(4)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(4)).getDefinition(); - } - - @Test - public void testBQOptional() { - customizeSchema( - Schema.of( - Field.newBuilder("optional_mode", LegacySQLTypeName.INTEGER) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - compact.check("projects/p/datasets/d/tables/t", ProtoOptionalBQOptional.getDescriptor(), false); - compact.check("projects/p/datasets/d/tables/t", ProtoRequiredBQOptional.getDescriptor(), false); - - try { - compact.check( - "projects/p/datasets/d/tables/t", ProtoRepeatedBQOptional.getDescriptor(), false); - fail("Should fail: BQ schema is nullable, but proto field is repeated."); - } catch (IllegalArgumentException expected) { - assertEquals( - "Given proto field " - + ProtoRepeatedBQOptional.getDescriptor().getName() - + ".optional_mode is repeated but Big Query field t.optional_mode is optional.", - expected.getMessage()); - } - - verify(mockBigquery, times(3)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(3)).getDefinition(); - } - - @Test - public void testBQBool() { - customizeSchema( - Schema.of( - Field.newBuilder("test_field_type", LegacySQLTypeName.BOOLEAN) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - HashSet compatible = - new HashSet<>( - Arrays.asList( - Int32Type.getDescriptor(), - Int64Type.getDescriptor(), - UInt32Type.getDescriptor(), - UInt64Type.getDescriptor(), - Fixed32Type.getDescriptor(), - Fixed64Type.getDescriptor(), - SFixed32Type.getDescriptor(), - SFixed64Type.getDescriptor(), - BoolType.getDescriptor())); - - for (Descriptors.Descriptor descriptor : type_descriptors) { - if (compatible.contains(descriptor)) { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - } else { - try { - compact.check("projects/p/datasets/d/tables/t", 
descriptor, false); - fail("Should fail: Proto schema type should not match BQ Boolean."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + descriptor.getName() - + ".test_field_type does not have a matching type with the big query field t.test_field_type.", - expected.getMessage()); - } - } - } - verify(mockBigquery, times(16)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(16)).getDefinition(); - } - - @Test - public void testBQBytes() { - customizeSchema( - Schema.of( - Field.newBuilder("test_field_type", LegacySQLTypeName.BYTES) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - HashSet compatible = - new HashSet<>(Arrays.asList(BytesType.getDescriptor())); - - for (Descriptors.Descriptor descriptor : type_descriptors) { - if (compatible.contains(descriptor)) { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - } else { - try { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - fail("Should fail: Proto schema type should not match BQ Bytes."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + descriptor.getName() - + ".test_field_type does not have a matching type with the big query field t.test_field_type.", - expected.getMessage()); - } - } - } - verify(mockBigquery, times(16)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(16)).getDefinition(); - } - - @Test - public void testBQDate() { - customizeSchema( - Schema.of( - Field.newBuilder("test_field_type", LegacySQLTypeName.DATE) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - HashSet compatible = - new HashSet<>( - Arrays.asList( - Int32Type.getDescriptor(), - Int64Type.getDescriptor(), - SFixed32Type.getDescriptor(), - SFixed64Type.getDescriptor())); - - for (Descriptors.Descriptor descriptor : type_descriptors) { - if (compatible.contains(descriptor)) { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - } else { - try { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - fail("Should fail: Proto schema type should not match BQ Date."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + descriptor.getName() - + ".test_field_type does not have a matching type with the big query field t.test_field_type.", - expected.getMessage()); - } - } - } - verify(mockBigquery, times(16)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(16)).getDefinition(); - } - - @Test - public void testBQDatetime() { - customizeSchema( - Schema.of( - Field.newBuilder("test_field_type", LegacySQLTypeName.DATETIME) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - HashSet compatible = - new HashSet<>(Arrays.asList(Int64Type.getDescriptor(), StringType.getDescriptor())); - - for (Descriptors.Descriptor descriptor : type_descriptors) { - if (compatible.contains(descriptor)) { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - } else { - try { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - fail("Should fail: Proto schema type should not match BQ Datetime."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + descriptor.getName() - + ".test_field_type does not have a matching type with the big query field 
t.test_field_type.", - expected.getMessage()); - } - } - } - verify(mockBigquery, times(16)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(16)).getDefinition(); - } - - @Test - public void testBQFloat() { - customizeSchema( - Schema.of( - Field.newBuilder("test_field_type", LegacySQLTypeName.FLOAT) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - HashSet compatible = - new HashSet<>(Arrays.asList(FloatType.getDescriptor(), DoubleType.getDescriptor())); - - for (Descriptors.Descriptor descriptor : type_descriptors) { - if (compatible.contains(descriptor)) { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - } else { - try { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - fail("Should fail: Proto schema type should not match BQ Float."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + descriptor.getName() - + ".test_field_type does not have a matching type with the big query field t.test_field_type.", - expected.getMessage()); - } - } - } - verify(mockBigquery, times(16)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(16)).getDefinition(); - } - - @Test - public void testBQGeography() { - customizeSchema( - Schema.of( - Field.newBuilder("test_field_type", LegacySQLTypeName.GEOGRAPHY) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - HashSet compatible = - new HashSet<>(Arrays.asList(StringType.getDescriptor())); - - for (Descriptors.Descriptor descriptor : type_descriptors) { - if (compatible.contains(descriptor)) { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - } else { - try { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - fail("Should fail: Proto schema type should not match BQ Geography."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + descriptor.getName() - + ".test_field_type does not have a matching type with the big query field t.test_field_type.", - expected.getMessage()); - } - } - } - verify(mockBigquery, times(16)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(16)).getDefinition(); - } - - @Test - public void testBQInteger() { - customizeSchema( - Schema.of( - Field.newBuilder("test_field_type", LegacySQLTypeName.INTEGER) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - HashSet compatible = - new HashSet<>( - Arrays.asList( - Int32Type.getDescriptor(), - Int64Type.getDescriptor(), - UInt32Type.getDescriptor(), - Fixed32Type.getDescriptor(), - SFixed32Type.getDescriptor(), - SFixed64Type.getDescriptor(), - EnumType.getDescriptor())); - - for (Descriptors.Descriptor descriptor : type_descriptors) { - if (compatible.contains(descriptor)) { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - } else { - try { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - fail("Should fail: Proto schema type should not match BQ Integer."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + descriptor.getName() - + ".test_field_type does not have a matching type with the big query field t.test_field_type.", - expected.getMessage()); - } - } - } - verify(mockBigquery, times(16)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(16)).getDefinition(); - 
} - - @Test - public void testBQNumeric() { - customizeSchema( - Schema.of( - Field.newBuilder("test_field_type", LegacySQLTypeName.NUMERIC) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - HashSet compatible = - new HashSet<>( - Arrays.asList( - Int32Type.getDescriptor(), - Int64Type.getDescriptor(), - UInt32Type.getDescriptor(), - UInt64Type.getDescriptor(), - Fixed32Type.getDescriptor(), - Fixed64Type.getDescriptor(), - SFixed32Type.getDescriptor(), - SFixed64Type.getDescriptor(), - BytesType.getDescriptor(), - StringType.getDescriptor(), - FloatType.getDescriptor(), - DoubleType.getDescriptor())); - - for (Descriptors.Descriptor descriptor : type_descriptors) { - if (compatible.contains(descriptor)) { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - } else { - try { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - fail("Should fail: Proto schema type should not match BQ Numeric."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + descriptor.getName() - + ".test_field_type does not have a matching type with the big query field t.test_field_type.", - expected.getMessage()); - } - } - } - verify(mockBigquery, times(16)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(16)).getDefinition(); - } - - @Test - public void testBQRecord() { - Field nestedMessage = - Field.newBuilder("test_field_type", LegacySQLTypeName.STRING) - .setMode(Field.Mode.NULLABLE) - .build(); - customizeSchema( - Schema.of( - Field.newBuilder("test_field_type", LegacySQLTypeName.RECORD, nestedMessage) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - HashSet compatible = - new HashSet<>(Arrays.asList(MessageType.getDescriptor(), GroupType.getDescriptor())); - - for (Descriptors.Descriptor descriptor : type_descriptors) { - if (compatible.contains(descriptor)) { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - } else { - try { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - fail("Should fail: Proto schema type should not match BQ String."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + descriptor.getName() - + ".test_field_type does not have a matching type with the big query field t.test_field_type.", - expected.getMessage()); - } - } - } - verify(mockBigquery, times(16)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(16)).getDefinition(); - } - - @Test - public void testBQRecordMismatch() { - Field nestedMessage1 = - Field.newBuilder("test_field_type", LegacySQLTypeName.INTEGER) - .setMode(Field.Mode.NULLABLE) - .build(); - Field nestedMessage0 = - Field.newBuilder("mismatchlvl1", LegacySQLTypeName.RECORD, nestedMessage1) - .setMode(Field.Mode.NULLABLE) - .build(); - customizeSchema( - Schema.of( - Field.newBuilder("mismatchlvl0", LegacySQLTypeName.RECORD, nestedMessage0) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - try { - compact.check("projects/p/datasets/d/tables/t", MessageTypeMismatch.getDescriptor(), false); - fail("Should fail: Proto schema type should not match BQ String."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + MessageTypeMismatch.getDescriptor().getName() - + ".mismatchlvl0.mismatchlvl1.test_field_type does not have a 
matching type with the big query field t.mismatchlvl0.mismatchlvl1.test_field_type.", - expected.getMessage()); - } - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } - - @Test - public void testBQRecordMatch() { - Field nestedMessage1 = - Field.newBuilder("test_field_type", LegacySQLTypeName.STRING) - .setMode(Field.Mode.NULLABLE) - .build(); - Field nestedMessage0 = - Field.newBuilder("mismatchlvl1", LegacySQLTypeName.RECORD, nestedMessage1) - .setMode(Field.Mode.NULLABLE) - .build(); - customizeSchema( - Schema.of( - Field.newBuilder("mismatchlvl0", LegacySQLTypeName.RECORD, nestedMessage0) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - compact.check("projects/p/datasets/d/tables/t", MessageTypeMismatch.getDescriptor(), false); - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } - - @Test - public void testBQString() { - customizeSchema( - Schema.of( - Field.newBuilder("test_field_type", LegacySQLTypeName.STRING) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - HashSet compatible = - new HashSet<>(Arrays.asList(StringType.getDescriptor(), EnumType.getDescriptor())); - - for (Descriptors.Descriptor descriptor : type_descriptors) { - if (compatible.contains(descriptor)) { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - } else { - try { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - fail("Should fail: Proto schema type should not match BQ String."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + descriptor.getName() - + ".test_field_type does not have a matching type with the big query field t.test_field_type.", - expected.getMessage()); - } - } - } - verify(mockBigquery, times(16)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(16)).getDefinition(); - } - - @Test - public void testBQTime() { - customizeSchema( - Schema.of( - Field.newBuilder("test_field_type", LegacySQLTypeName.TIME) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - HashSet compatible = - new HashSet<>(Arrays.asList(Int64Type.getDescriptor(), StringType.getDescriptor())); - - for (Descriptors.Descriptor descriptor : type_descriptors) { - if (compatible.contains(descriptor)) { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - } else { - try { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - fail("Should fail: Proto schema type should not match BQ Time."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + descriptor.getName() - + ".test_field_type does not have a matching type with the big query field t.test_field_type.", - expected.getMessage()); - } - } - } - verify(mockBigquery, times(16)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(16)).getDefinition(); - } - - @Test - public void testBQTimestamp() { - customizeSchema( - Schema.of( - Field.newBuilder("test_field_type", LegacySQLTypeName.TIMESTAMP) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - HashSet compatible = - new HashSet<>( - Arrays.asList( - Int32Type.getDescriptor(), - Int64Type.getDescriptor(), - 
UInt32Type.getDescriptor(), - Fixed32Type.getDescriptor(), - SFixed32Type.getDescriptor(), - SFixed64Type.getDescriptor(), - EnumType.getDescriptor())); - - for (Descriptors.Descriptor descriptor : type_descriptors) { - if (compatible.contains(descriptor)) { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - } else { - try { - compact.check("projects/p/datasets/d/tables/t", descriptor, false); - fail("Should fail: Proto schema type should not match BQ Timestamp."); - } catch (IllegalArgumentException expected) { - assertEquals( - "The proto field " - + descriptor.getName() - + ".test_field_type does not have a matching type with the big query field t.test_field_type.", - expected.getMessage()); - } - } - } - verify(mockBigquery, times(16)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(16)).getDefinition(); - } - - /* - * Tests if having no matching fields in the top level causes an error. - */ - @Test - public void testBQTopLevelMismatch() { - customizeSchema( - Schema.of( - Field.newBuilder("test_toplevel_mismatch", LegacySQLTypeName.STRING) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - try { - compact.check("projects/p/datasets/d/tables/t", StringType.getDescriptor(), false); - } catch (IllegalArgumentException expected) { - assertEquals( - "There is no matching fields found for the proto schema " - + StringType.getDescriptor().getName() - + " and the BQ table schema t.", - expected.getMessage()); - } - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } - - /* - * Tests if there is at least 1 matching field in the top level. - */ - @Test - public void testBQTopLevelMatch() { - Field nestedMessage0 = - Field.newBuilder("mismatch", LegacySQLTypeName.STRING).setMode(Field.Mode.NULLABLE).build(); - customizeSchema( - Schema.of( - Field.newBuilder("mismatch", LegacySQLTypeName.RECORD, nestedMessage0) - .setMode(Field.Mode.NULLABLE) - .build(), - Field.newBuilder("match", LegacySQLTypeName.STRING) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - compact.check("projects/p/datasets/d/tables/t", TopLevelMatch.getDescriptor(), false); - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } - - @Test - public void testAllowUnknownUnsupportedFields() { - customizeSchema( - Schema.of( - Field.newBuilder("string_value", LegacySQLTypeName.STRING) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - compact.check( - "projects/p/datasets/d/tables/t", AllowUnknownUnsupportedFields.getDescriptor(), true); - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } - - @Test - public void testLowerCase() { - customizeSchema( - Schema.of( - Field.newBuilder("tEsT_fIeLd_TyPe", LegacySQLTypeName.STRING) - .setMode(Field.Mode.NULLABLE) - .build())); - SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); - compact.check("projects/p/datasets/d/tables/t", StringType.getDescriptor(), true); - verify(mockBigquery, times(1)).getTable(any(TableId.class)); - verify(mockBigqueryTable, times(1)).getDefinition(); - } -} diff --git 
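With SchemaCompatibility and its test suite removed, the integration tests below stop deriving the writer schema from a BigQuery v2 table definition and instead build a storage-API TableSchema inline. A minimal sketch of that pattern, with an illustrative column name that is not part of this change:

import com.google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
import com.google.cloud.bigquery.storage.v1beta2.TableSchema;

public class TableSchemaSketch {
  static TableSchema singleStringColumn() {
    // One NULLABLE STRING column, mirroring the builder chains added in the hunks below.
    TableFieldSchema field =
        TableFieldSchema.newBuilder()
            .setType(TableFieldSchema.Type.STRING)
            .setMode(TableFieldSchema.Mode.NULLABLE)
            .setName("example_column") // hypothetical column name
            .build();
    // The tests use the index-based addFields(int, TableFieldSchema) overload.
    return TableSchema.newBuilder().addFields(0, field).build();
  }
}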
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryBigDecimalByteStringEncoderTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryBigDecimalByteStringEncoderTest.java
index 2738bc0db7..ef6ec2d953 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryBigDecimalByteStringEncoderTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryBigDecimalByteStringEncoderTest.java
@@ -35,7 +35,9 @@
 import com.google.cloud.bigquery.storage.v1beta2.BigDecimalByteStringEncoder;
 import com.google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient;
 import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
+import com.google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
 import com.google.cloud.bigquery.storage.v1beta2.TableName;
+import com.google.cloud.bigquery.storage.v1beta2.TableSchema;
 import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
 import com.google.protobuf.Descriptors;
 import java.io.IOException;
@@ -103,9 +105,32 @@ public void TestBigDecimalEncoding()
       throws IOException, InterruptedException, ExecutionException,
           Descriptors.DescriptorValidationException {
     TableName parent = TableName.of(ServiceOptions.getDefaultProjectId(), DATASET, TABLE);
+    TableFieldSchema TEST_NUMERIC_ZERO =
+        TableFieldSchema.newBuilder()
+            .setType(TableFieldSchema.Type.NUMERIC)
+            .setMode(TableFieldSchema.Mode.NULLABLE)
+            .setName("test_numeric_zero")
+            .build();
+    TableFieldSchema TEST_NUMERIC_ONE =
+        TableFieldSchema.newBuilder()
+            .setType(TableFieldSchema.Type.NUMERIC)
+            .setMode(TableFieldSchema.Mode.NULLABLE)
+            .setName("test_numeric_one")
+            .build();
+    TableFieldSchema TEST_NUMERIC_REPEATED =
+        TableFieldSchema.newBuilder()
+            .setType(TableFieldSchema.Type.NUMERIC)
+            .setMode(TableFieldSchema.Mode.REPEATED)
+            .setName("test_numeric_repeated")
+            .build();
+    TableSchema tableSchema =
+        TableSchema.newBuilder()
+            .addFields(0, TEST_NUMERIC_ZERO)
+            .addFields(1, TEST_NUMERIC_ONE)
+            .addFields(2, TEST_NUMERIC_REPEATED)
+            .build();
     try (JsonStreamWriter jsonStreamWriter =
-        JsonStreamWriter.newBuilder(parent.toString(), tableInfo.getDefinition().getSchema())
-            .build()) {
+        JsonStreamWriter.newBuilder(parent.toString(), tableSchema).build()) {
       JSONObject row = new JSONObject();
       row.put(
           "test_numeric_zero",
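The hunk above hands the hand-built NUMERIC schema to JsonStreamWriter; the row values themselves come from BigDecimalByteStringEncoder, which the test already imports. A sketch of building one such row, assuming encodeToNumericByteString(BigDecimal) is the encoder entry point and that the JSON writer accepts the resulting ByteString as a NUMERIC value, as the test body suggests:

import com.google.cloud.bigquery.storage.v1beta2.BigDecimalByteStringEncoder;
import com.google.protobuf.ByteString;
import java.math.BigDecimal;
import org.json.JSONObject;

public class NumericRowSketch {
  static JSONObject zeroRow() {
    // Encode BigDecimal.ZERO into the NUMERIC wire format expected by the write API.
    ByteString encoded = BigDecimalByteStringEncoder.encodeToNumericByteString(BigDecimal.ZERO);
    JSONObject row = new JSONObject();
    // Field name taken from the test schema above; the encoded value is passed through as-is.
    row.put("test_numeric_zero", encoded);
    return row;
  }
}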
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryTimeEncoderTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryTimeEncoderTest.java
index e367a2b29e..4906de9c64 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryTimeEncoderTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryTimeEncoderTest.java
@@ -34,7 +34,9 @@
 import com.google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient;
 import com.google.cloud.bigquery.storage.v1beta2.CivilTimeEncoder;
 import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
+import com.google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
 import com.google.cloud.bigquery.storage.v1beta2.TableName;
+import com.google.cloud.bigquery.storage.v1beta2.TableSchema;
 import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
 import com.google.protobuf.Descriptors;
 import java.io.IOException;
@@ -102,9 +104,32 @@ public void TestTimeEncoding()
       throws IOException, InterruptedException, ExecutionException,
           Descriptors.DescriptorValidationException {
     TableName parent = TableName.of(ServiceOptions.getDefaultProjectId(), DATASET, TABLE);
+    TableFieldSchema TEST_STRING =
+        TableFieldSchema.newBuilder()
+            .setType(TableFieldSchema.Type.STRING)
+            .setMode(TableFieldSchema.Mode.NULLABLE)
+            .setName("test_str")
+            .build();
+    TableFieldSchema TEST_TIME =
+        TableFieldSchema.newBuilder()
+            .setType(TableFieldSchema.Type.TIME)
+            .setMode(TableFieldSchema.Mode.REPEATED)
+            .setName("test_time_micros")
+            .build();
+    TableFieldSchema TEST_DATETIME =
+        TableFieldSchema.newBuilder()
+            .setType(TableFieldSchema.Type.DATETIME)
+            .setMode(TableFieldSchema.Mode.REPEATED)
+            .setName("test_datetime_micros")
+            .build();
+    TableSchema tableSchema =
+        TableSchema.newBuilder()
+            .addFields(0, TEST_STRING)
+            .addFields(1, TEST_TIME)
+            .addFields(2, TEST_DATETIME)
+            .build();
     try (JsonStreamWriter jsonStreamWriter =
-        JsonStreamWriter.newBuilder(parent.toString(), tableInfo.getDefinition().getSchema())
-            .build()) {
+        JsonStreamWriter.newBuilder(parent.toString(), tableSchema).build()) {
       JSONObject row = new JSONObject();
       row.put("test_str", "Start of the day");
       row.put(
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryWriteManualClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryWriteManualClientTest.java
index 24132addc7..b93eeeaf34 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryWriteManualClientTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryWriteManualClientTest.java
@@ -320,6 +320,30 @@ public void testJsonStreamWriterWithDefaultStream()
       throws IOException, InterruptedException, ExecutionException,
           Descriptors.DescriptorValidationException {
     String tableName = "JsonTableDefaultStream";
+    TableFieldSchema TEST_STRING =
+        TableFieldSchema.newBuilder()
+            .setType(TableFieldSchema.Type.STRING)
+            .setMode(TableFieldSchema.Mode.NULLABLE)
+            .setName("test_str")
+            .build();
+    TableFieldSchema TEST_NUMERIC =
+        TableFieldSchema.newBuilder()
+            .setType(TableFieldSchema.Type.NUMERIC)
+            .setMode(TableFieldSchema.Mode.REPEATED)
+            .setName("test_numerics")
+            .build();
+    TableFieldSchema TEST_DATE =
+        TableFieldSchema.newBuilder()
+            .setType(TableFieldSchema.Type.DATETIME)
+            .setMode(TableFieldSchema.Mode.NULLABLE)
+            .setName("test_datetime")
+            .build();
+    TableSchema tableSchema =
+        TableSchema.newBuilder()
+            .addFields(0, TEST_STRING)
+            .addFields(1, TEST_DATE)
+            .addFields(2, TEST_NUMERIC)
+            .build();
     TableInfo tableInfo =
         TableInfo.newBuilder(
             TableId.of(DATASET, tableName),
@@ -339,8 +363,7 @@ public void testJsonStreamWriterWithDefaultStream()
     bigquery.create(tableInfo);
     TableName parent = TableName.of(ServiceOptions.getDefaultProjectId(), DATASET, tableName);
     try (JsonStreamWriter jsonStreamWriter =
-        JsonStreamWriter.newBuilder(parent.toString(), tableInfo.getDefinition().getSchema())
-            .build()) {
+        JsonStreamWriter.newBuilder(parent.toString(), tableSchema).build()) {
       LOG.info("Sending one message");
       JSONObject row1 = new JSONObject();
       row1.put("test_str", "aaa");
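The default-stream test makes the same substitution: the writer is built from the inline TableSchema instead of tableInfo.getDefinition().getSchema(). A condensed sketch of the append path, assuming the v1beta2 append(JSONArray) overload and a caller-supplied parent table path:

import com.google.api.core.ApiFuture;
import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
import com.google.cloud.bigquery.storage.v1beta2.TableSchema;
import org.json.JSONArray;
import org.json.JSONObject;

public class DefaultStreamAppendSketch {
  static void appendOne(String parentTable, TableSchema tableSchema) throws Exception {
    // The writer now takes the storage-API TableSchema rather than a v2 Schema.
    try (JsonStreamWriter writer =
        JsonStreamWriter.newBuilder(parentTable, tableSchema).build()) {
      JSONObject row = new JSONObject();
      row.put("test_str", "aaa"); // column name from the test schema above
      JSONArray rows = new JSONArray();
      rows.put(row);
      ApiFuture<AppendRowsResponse> future = writer.append(rows);
      future.get(); // block until the append is acknowledged
    }
  }
}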
".kokoro/dependencies.sh" ] ) diff --git a/pom.xml b/pom.xml index 731348acaf..db9c02034b 100644 --- a/pom.xml +++ b/pom.xml @@ -80,11 +80,6 @@ pom import - - com.google.cloud - google-cloud-bigquery - 2.1.7 - com.google.api.grpc proto-google-cloud-bigquerystorage-v1beta1 @@ -134,6 +129,12 @@ 4.13.2 test + + com.google.cloud + google-cloud-bigquery + 2.1.6 + test + diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml index db5f5ed305..5327420ca0 100644 --- a/samples/install-without-bom/pom.xml +++ b/samples/install-without-bom/pom.xml @@ -34,12 +34,16 @@ + + com.google.cloud + google-cloud-bigquery + 2.1.6 + org.apache.avro avro 1.10.2 - org.apache.arrow arrow-vector diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 98ca9af0ee..11fc613b58 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -33,6 +33,11 @@ + + com.google.cloud + google-cloud-bigquery + 2.1.6 + org.apache.avro avro diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml index 44197a2af7..3098976c6c 100644 --- a/samples/snippets/pom.xml +++ b/samples/snippets/pom.xml @@ -45,12 +45,16 @@ + + com.google.cloud + google-cloud-bigquery + 2.1.6 + org.apache.avro avro 1.10.2 - org.apache.arrow arrow-vector diff --git a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java index 1f79c02a51..fac93cde69 100644 --- a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java +++ b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java @@ -18,13 +18,11 @@ // [START bigquerystorage_jsonstreamwriter_default] import com.google.api.core.ApiFuture; -import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.BigQueryOptions; -import com.google.cloud.bigquery.Schema; -import com.google.cloud.bigquery.Table; import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse; import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter; +import com.google.cloud.bigquery.storage.v1beta2.TableFieldSchema; import com.google.cloud.bigquery.storage.v1beta2.TableName; +import com.google.cloud.bigquery.storage.v1beta2.TableSchema; import com.google.protobuf.Descriptors.DescriptorValidationException; import java.io.IOException; import java.util.concurrent.ExecutionException; @@ -39,27 +37,31 @@ public static void runWriteToDefaultStream() String projectId = "MY_PROJECT_ID"; String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; - - writeToDefaultStream(projectId, datasetName, tableName); + TableFieldSchema strField = + TableFieldSchema.newBuilder() + .setType(TableFieldSchema.Type.STRING) + .setMode(TableFieldSchema.Mode.NULLABLE) + .setName("test_string") + .build(); + TableSchema tableSchema = TableSchema.newBuilder().addFields(0, strField).build(); + writeToDefaultStream(projectId, datasetName, tableName, tableSchema); } - public static void writeToDefaultStream(String projectId, String datasetName, String tableName) + public static void writeToDefaultStream( + String projectId, String datasetName, String tableName, TableSchema tableSchema) throws DescriptorValidationException, InterruptedException, IOException { - BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - Table table = bigquery.getTable(datasetName, tableName); TableName parentTable = TableName.of(projectId, datasetName, tableName); - Schema schema = 
diff --git a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java
index 1f79c02a51..fac93cde69 100644
--- a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java
+++ b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java
@@ -18,13 +18,11 @@
 // [START bigquerystorage_jsonstreamwriter_default]
 import com.google.api.core.ApiFuture;
-import com.google.cloud.bigquery.BigQuery;
-import com.google.cloud.bigquery.BigQueryOptions;
-import com.google.cloud.bigquery.Schema;
-import com.google.cloud.bigquery.Table;
 import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
 import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
+import com.google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
 import com.google.cloud.bigquery.storage.v1beta2.TableName;
+import com.google.cloud.bigquery.storage.v1beta2.TableSchema;
 import com.google.protobuf.Descriptors.DescriptorValidationException;
 import java.io.IOException;
 import java.util.concurrent.ExecutionException;
@@ -39,27 +37,31 @@ public static void runWriteToDefaultStream()
     String projectId = "MY_PROJECT_ID";
     String datasetName = "MY_DATASET_NAME";
     String tableName = "MY_TABLE_NAME";
-
-    writeToDefaultStream(projectId, datasetName, tableName);
+    TableFieldSchema strField =
+        TableFieldSchema.newBuilder()
+            .setType(TableFieldSchema.Type.STRING)
+            .setMode(TableFieldSchema.Mode.NULLABLE)
+            .setName("test_string")
+            .build();
+    TableSchema tableSchema = TableSchema.newBuilder().addFields(0, strField).build();
+    writeToDefaultStream(projectId, datasetName, tableName, tableSchema);
   }

-  public static void writeToDefaultStream(String projectId, String datasetName, String tableName)
+  public static void writeToDefaultStream(
+      String projectId, String datasetName, String tableName, TableSchema tableSchema)
       throws DescriptorValidationException, InterruptedException, IOException {
-    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
-    Table table = bigquery.getTable(datasetName, tableName);
     TableName parentTable = TableName.of(projectId, datasetName, tableName);
-    Schema schema = table.getDefinition().getSchema();
     // Use the JSON stream writer to send records in JSON format.
     // For more information about JsonStreamWriter, see:
     // https://googleapis.dev/java/google-cloud-bigquerystorage/latest/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.html
     try (JsonStreamWriter writer =
-        JsonStreamWriter.newBuilder(parentTable.toString(), schema).build()) {
+        JsonStreamWriter.newBuilder(parentTable.toString(), tableSchema).build()) {
       // Append 10 JSON objects to the stream.
       for (int i = 0; i < 10; i++) {
         // Create a JSON object that is compatible with the table schema.
         JSONObject record = new JSONObject();
-        record.put("col1", String.format("record %03d", i));
+        record.put("test_string", String.format("record %03d", i));
         JSONArray jsonArr = new JSONArray();
         jsonArr.put(record);
diff --git a/samples/snippets/src/test/java/com/example/bigquerystorage/WriteToDefaultStreamIT.java b/samples/snippets/src/test/java/com/example/bigquerystorage/WriteToDefaultStreamIT.java
index a0047cbb32..94d29f125c 100644
--- a/samples/snippets/src/test/java/com/example/bigquerystorage/WriteToDefaultStreamIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquerystorage/WriteToDefaultStreamIT.java
@@ -30,6 +30,8 @@
 import com.google.cloud.bigquery.StandardTableDefinition;
 import com.google.cloud.bigquery.TableId;
 import com.google.cloud.bigquery.TableInfo;
+import com.google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
+import com.google.cloud.bigquery.storage.v1beta2.TableSchema;
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
 import java.util.UUID;
@@ -50,6 +52,7 @@ public class WriteToDefaultStreamIT {
   private BigQuery bigquery;
   private String datasetName;
   private String tableName;
+  private TableSchema tableSchema;

   private static void requireEnvVar(String varName) {
     assertNotNull(
@@ -73,10 +76,23 @@ public void setUp() {
     // Create a new dataset and table for each test.
     datasetName = "WRITE_STREAM_TEST" + UUID.randomUUID().toString().substring(0, 8);
     tableName = "DEFAULT_STREAM_TEST" + UUID.randomUUID().toString().substring(0, 8);
-    Schema schema = Schema.of(Field.of("col1", StandardSQLTypeName.STRING));
     bigquery.create(DatasetInfo.newBuilder(datasetName).build());
+    TableFieldSchema strField =
+        TableFieldSchema.newBuilder()
+            .setType(TableFieldSchema.Type.STRING)
+            .setMode(TableFieldSchema.Mode.NULLABLE)
+            .setName("test_string")
+            .build();
+    tableSchema = TableSchema.newBuilder().addFields(0, strField).build();
     TableInfo tableInfo =
-        TableInfo.newBuilder(TableId.of(datasetName, tableName), StandardTableDefinition.of(schema))
+        TableInfo.newBuilder(
+                TableId.of(datasetName, tableName),
+                StandardTableDefinition.of(
+                    Schema.of(
+                        com.google.cloud.bigquery.Field.newBuilder(
+                                "test_string", StandardSQLTypeName.STRING)
+                            .setMode(Field.Mode.NULLABLE)
+                            .build())))
             .build();
     bigquery.create(tableInfo);
   }
@@ -90,7 +106,8 @@ public void tearDown() {

   @Test
   public void testWriteToDefaultStream() throws Exception {
-    WriteToDefaultStream.writeToDefaultStream(GOOGLE_CLOUD_PROJECT, datasetName, tableName);
+    WriteToDefaultStream.writeToDefaultStream(
+        GOOGLE_CLOUD_PROJECT, datasetName, tableName, tableSchema);
     assertThat(bout.toString()).contains("Appended records successfully.");
   }
 }
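Callers of the sample must now supply the schema themselves. A hypothetical invocation matching the new four-argument signature, with the placeholder project, dataset, and table values used in the sample (assumes the caller lives in the same package as WriteToDefaultStream):

import com.google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
import com.google.cloud.bigquery.storage.v1beta2.TableSchema;

public class CallerSketch {
  public static void main(String[] args) throws Exception {
    // The schema must describe the destination table; one NULLABLE STRING
    // column named "test_string", exactly as the sample's runner builds it.
    TableSchema tableSchema =
        TableSchema.newBuilder()
            .addFields(
                0,
                TableFieldSchema.newBuilder()
                    .setType(TableFieldSchema.Type.STRING)
                    .setMode(TableFieldSchema.Mode.NULLABLE)
                    .setName("test_string")
                    .build())
            .build();
    WriteToDefaultStream.writeToDefaultStream(
        "MY_PROJECT_ID", "MY_DATASET_NAME", "MY_TABLE_NAME", tableSchema);
  }
}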