diff --git a/google-cloud-bigquerystorage/clirr-ignored-differences.xml b/google-cloud-bigquerystorage/clirr-ignored-differences.xml
index c1328feea2..847badd79d 100644
--- a/google-cloud-bigquerystorage/clirr-ignored-differences.xml
+++ b/google-cloud-bigquerystorage/clirr-ignored-differences.xml
@@ -12,4 +12,20 @@
     <className>com/google/cloud/bigquery/storage/v1alpha2/StreamWriter</className>
     <method>boolean awaitTermination(long, java.util.concurrent.TimeUnit)</method>
   </difference>
-</differences>
\ No newline at end of file
+  <difference>
+    <differenceType>7005</differenceType>
+    <className>com/google/cloud/bigquery/storage/v1alpha2/DirectWriter</className>
+    <method>void testSetStub(com.google.cloud.bigquery.storage.v1alpha2.BigQueryWriteClient, int, com.google.cloud.bigquery.storage.v1alpha2.SchemaCompact)</method>
+    <to>void testSetStub(com.google.cloud.bigquery.storage.v1alpha2.BigQueryWriteClient, int, com.google.cloud.bigquery.storage.v1alpha2.SchemaCompatibility)</to>
+  </difference>
+  <difference>
+    <differenceType>8001</differenceType>
+    <className>com/google/cloud/bigquery/storage/v1alpha2/SchemaCompact</className>
+  </difference>
+  <difference>
+    <differenceType>7005</differenceType>
+    <className>com/google/cloud/bigquery/storage/v1alpha2/WriterCache</className>
+    <method>com.google.cloud.bigquery.storage.v1alpha2.WriterCache getTestInstance(com.google.cloud.bigquery.storage.v1alpha2.BigQueryWriteClient, int, com.google.cloud.bigquery.storage.v1alpha2.SchemaCompact)</method>
+    <to>com.google.cloud.bigquery.storage.v1alpha2.WriterCache getTestInstance(com.google.cloud.bigquery.storage.v1alpha2.BigQueryWriteClient, int, com.google.cloud.bigquery.storage.v1alpha2.SchemaCompatibility)</to>
+  </difference>
+</differences>
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/DirectWriter.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/DirectWriter.java
index 4b3032b615..3e1a57c715 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/DirectWriter.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/DirectWriter.java
@@ -99,7 +99,7 @@ public Long apply(Storage.AppendRowsResponse appendRowsResponse) { @VisibleForTesting public static void testSetStub( - 
BigQueryWriteClient stub, int maxTableEntry, SchemaCompact schemaCheck) { + BigQueryWriteClient stub, int maxTableEntry, SchemaCompatibility schemaCheck) { cache = WriterCache.getTestInstance(stub, maxTableEntry, schemaCheck); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/SchemaCompact.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/SchemaCompatibility.java similarity index 96% rename from google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/SchemaCompact.java rename to google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/SchemaCompatibility.java index 00c370c800..6ce3414134 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/SchemaCompact.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/SchemaCompatibility.java @@ -41,9 +41,9 @@ *

The implementation as of now is not complete, which measn, if this check passed, there is * still a possbility of writing will fail. */ -public class SchemaCompact { +public class SchemaCompatibility { private BigQuery bigquery; - private static SchemaCompact compact; + private static SchemaCompatibility compact; private static String tablePatternString = "projects/([^/]+)/datasets/([^/]+)/tables/([^/]+)"; private static Pattern tablePattern = Pattern.compile(tablePatternString); private static final int NestingLimit = 15; @@ -70,33 +70,33 @@ public class SchemaCompact { Descriptors.FieldDescriptor.Type.GROUP, Descriptors.FieldDescriptor.Type.ENUM))); - private SchemaCompact(BigQuery bigquery) { - // TODO: Add functionality that allows SchemaCompact to build schemas. + private SchemaCompatibility(BigQuery bigquery) { + // TODO: Add functionality that allows SchemaCompatibility to build schemas. this.bigquery = bigquery; } /** - * Gets a singleton {code SchemaCompact} object. + * Gets a singleton {code SchemaCompatibility} object. * * @return */ - public static SchemaCompact getInstance() { + public static SchemaCompatibility getInstance() { if (compact == null) { RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); - compact = new SchemaCompact(bigqueryHelper.getOptions().getService()); + compact = new SchemaCompatibility(bigqueryHelper.getOptions().getService()); } return compact; } /** - * Gets a {code SchemaCompact} object with custom BigQuery stub. + * Gets a {code SchemaCompatibility} object with custom BigQuery stub. 
* * @param bigquery * @return */ @VisibleForTesting - public static SchemaCompact getInstance(BigQuery bigquery) { - return new SchemaCompact(bigquery); + public static SchemaCompatibility getInstance(BigQuery bigquery) { + return new SchemaCompatibility(bigquery); } private TableId getTableId(String tableName) { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriterCache.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriterCache.java index 68eb59d4af..a8e48e0759 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriterCache.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriterCache.java @@ -47,9 +47,9 @@ public class WriterCache { private static final int MAX_WRITERS_PER_TABLE = 2; private final BigQueryWriteClient stub; - private final SchemaCompact compact; + private final SchemaCompatibility compact; - private WriterCache(BigQueryWriteClient stub, int maxTableEntry, SchemaCompact compact) { + private WriterCache(BigQueryWriteClient stub, int maxTableEntry, SchemaCompatibility compact) { this.stub = stub; this.compact = compact; writerCache = @@ -71,7 +71,7 @@ public static WriterCache getInstance() throws IOException { if (instance == null) { BigQueryWriteSettings stubSettings = BigQueryWriteSettings.newBuilder().build(); BigQueryWriteClient stub = BigQueryWriteClient.create(stubSettings); - instance = new WriterCache(stub, MAX_TABLE_ENTRY, SchemaCompact.getInstance()); + instance = new WriterCache(stub, MAX_TABLE_ENTRY, SchemaCompatibility.getInstance()); } return instance; } @@ -79,7 +79,7 @@ public static WriterCache getInstance() throws IOException { /** Returns a cache with custom stub used by test. 
*/ @VisibleForTesting public static WriterCache getTestInstance( - BigQueryWriteClient stub, int maxTableEntry, SchemaCompact compact) { + BigQueryWriteClient stub, int maxTableEntry, SchemaCompatibility compact) { return new WriterCache(stub, maxTableEntry, compact); } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/DirectWriterTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/DirectWriterTest.java index c3b059c777..f57ac92339 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/DirectWriterTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/DirectWriterTest.java @@ -53,7 +53,7 @@ public class DirectWriterTest { private BigQueryWriteClient client; private LocalChannelProvider channelProvider; - @Mock private static SchemaCompact schemaCheck; + @Mock private static SchemaCompatibility schemaCheck; @BeforeClass public static void startStaticServer() { diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/SchemaCompactTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/SchemaCompatibilityTest.java similarity index 93% rename from google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/SchemaCompactTest.java rename to google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/SchemaCompatibilityTest.java index 259bc59da0..853fbe6711 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/SchemaCompactTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/SchemaCompatibilityTest.java @@ -39,7 +39,7 @@ import org.mockito.MockitoAnnotations; @RunWith(JUnit4.class) -public class SchemaCompactTest { +public class SchemaCompatibilityTest { @Mock 
private BigQuery mockBigquery; @Mock private Table mockBigqueryTable; Descriptors.Descriptor[] type_descriptors = { @@ -102,7 +102,7 @@ public void testSuccess() throws Exception { Field.newBuilder("Foo", LegacySQLTypeName.STRING) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); compact.check("projects/p/datasets/d/tables/t", FooType.getDescriptor(), false); verify(mockBigquery, times(1)).getTable(any(TableId.class)); verify(mockBigqueryTable, times(1)).getDefinition(); @@ -111,7 +111,7 @@ public void testSuccess() throws Exception { @Test public void testBadTableName() throws Exception { try { - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); compact.check("blah", FooType.getDescriptor(), false); fail("should fail"); } catch (IllegalArgumentException expected) { @@ -121,7 +121,7 @@ public void testBadTableName() throws Exception { @Test public void testSupportedTypes() { - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); for (Descriptors.FieldDescriptor field : SupportedTypes.getDescriptor().getFields()) { assertTrue(compact.isSupportedType(field)); } @@ -138,7 +138,7 @@ public void testMap() { Field.newBuilder("map_value", LegacySQLTypeName.INTEGER) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); Descriptors.Descriptor testMap = NonSupportedMap.getDescriptor(); String protoName = testMap.getName() + ".map_value"; try { @@ -167,7 +167,7 @@ public void testNestingSupportedSimple() { Field.newBuilder("nesting_value", LegacySQLTypeName.RECORD, BQSupportedNestingLvl2) .setMode(Field.Mode.NULLABLE) 
.build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); Descriptors.Descriptor testNesting = SupportedNestingLvl1.getDescriptor(); try { compact.check("projects/p/datasets/d/tables/t", testNesting, false); @@ -195,7 +195,7 @@ public void testNestingSupportedStacked() { Field.newBuilder("nesting_value2", LegacySQLTypeName.RECORD, BQSupportedNestingLvl2) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); Descriptors.Descriptor testNesting = SupportedNestingStacked.getDescriptor(); try { compact.check("projects/p/datasets/d/tables/t", testNesting, false); @@ -232,7 +232,7 @@ public void testNestingContainsRecursive() { "nesting_value", LegacySQLTypeName.RECORD, BQNonSupportedNestingRecursive) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); Descriptors.Descriptor testNesting = NonSupportedNestingContainsRecursive.getDescriptor(); try { compact.check("projects/p/datasets/d/tables/t", testNesting, false); @@ -317,7 +317,7 @@ public void testNestingRecursiveLimit() { Field.newBuilder("test1", LegacySQLTypeName.RECORD, NonSupportedNestingLvl1) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); Descriptors.Descriptor testNesting = NonSupportedNestingLvl0.getDescriptor(); try { compact.check("projects/p/datasets/d/tables/t", testNesting, false); @@ -337,7 +337,7 @@ public void testNestingRecursiveLimit() { public void testProtoMoreFields() { Schema customSchema = Schema.of(Field.of("int32_value", LegacySQLTypeName.INTEGER)); customizeSchema(customSchema); - 
SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); try { compact.check("projects/p/datasets/d/tables/t", SupportedTypes.getDescriptor(), false); @@ -364,7 +364,7 @@ public void testBQRepeated() { Field.newBuilder("repeated_mode", LegacySQLTypeName.INTEGER) .setMode(Field.Mode.REPEATED) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); compact.check("projects/p/datasets/d/tables/t", ProtoRepeatedBQRepeated.getDescriptor(), false); try { compact.check( @@ -402,7 +402,7 @@ public void testBQRequired() { Field.newBuilder("required_mode", LegacySQLTypeName.INTEGER) .setMode(Field.Mode.REQUIRED) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); compact.check("projects/p/datasets/d/tables/t", ProtoRequiredBQRequired.getDescriptor(), false); try { @@ -450,7 +450,7 @@ public void testBQOptional() { Field.newBuilder("optional_mode", LegacySQLTypeName.INTEGER) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); compact.check("projects/p/datasets/d/tables/t", ProtoOptionalBQOptional.getDescriptor(), false); compact.check("projects/p/datasets/d/tables/t", ProtoRequiredBQOptional.getDescriptor(), false); @@ -477,7 +477,7 @@ public void testBQBool() { Field.newBuilder("test_field_type", LegacySQLTypeName.BOOLEAN) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); HashSet compatible = new HashSet<>( Arrays.asList( @@ -518,7 +518,7 @@ public void testBQBytes() { Field.newBuilder("test_field_type", 
LegacySQLTypeName.BYTES) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); HashSet compatible = new HashSet<>(Arrays.asList(BytesType.getDescriptor())); @@ -549,7 +549,7 @@ public void testBQDate() { Field.newBuilder("test_field_type", LegacySQLTypeName.DATE) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); HashSet compatible = new HashSet<>( Arrays.asList( @@ -585,7 +585,7 @@ public void testBQDatetime() { Field.newBuilder("test_field_type", LegacySQLTypeName.DATETIME) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); HashSet compatible = new HashSet<>(Arrays.asList(Int64Type.getDescriptor(), SFixed64Type.getDescriptor())); @@ -616,7 +616,7 @@ public void testBQFloat() { Field.newBuilder("test_field_type", LegacySQLTypeName.FLOAT) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); HashSet compatible = new HashSet<>(Arrays.asList(FloatType.getDescriptor(), DoubleType.getDescriptor())); @@ -647,7 +647,7 @@ public void testBQGeography() { Field.newBuilder("test_field_type", LegacySQLTypeName.GEOGRAPHY) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); HashSet compatible = new HashSet<>(Arrays.asList(BytesType.getDescriptor())); @@ -678,7 +678,7 @@ public void testBQInteger() { Field.newBuilder("test_field_type", LegacySQLTypeName.INTEGER) .setMode(Field.Mode.NULLABLE) .build())); - 
SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); HashSet compatible = new HashSet<>( Arrays.asList( @@ -717,7 +717,7 @@ public void testBQNumeric() { Field.newBuilder("test_field_type", LegacySQLTypeName.NUMERIC) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); HashSet compatible = new HashSet<>( Arrays.asList( @@ -762,7 +762,7 @@ public void testBQRecord() { Field.newBuilder("test_field_type", LegacySQLTypeName.RECORD, nestedMessage) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); HashSet compatible = new HashSet<>(Arrays.asList(MessageType.getDescriptor(), GroupType.getDescriptor())); @@ -801,7 +801,7 @@ public void testBQRecordMismatch() { Field.newBuilder("mismatchlvl0", LegacySQLTypeName.RECORD, nestedMessage0) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); try { compact.check("projects/p/datasets/d/tables/t", MessageTypeMismatch.getDescriptor(), false); fail("Should fail: Proto schema type should not match BQ String."); @@ -831,7 +831,7 @@ public void testBQRecordMatch() { Field.newBuilder("mismatchlvl0", LegacySQLTypeName.RECORD, nestedMessage0) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); compact.check("projects/p/datasets/d/tables/t", MessageTypeMismatch.getDescriptor(), false); verify(mockBigquery, times(1)).getTable(any(TableId.class)); verify(mockBigqueryTable, times(1)).getDefinition(); @@ -844,7 +844,7 @@ 
public void testBQString() { Field.newBuilder("test_field_type", LegacySQLTypeName.STRING) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); HashSet compatible = new HashSet<>(Arrays.asList(StringType.getDescriptor(), EnumType.getDescriptor())); @@ -875,7 +875,7 @@ public void testBQTime() { Field.newBuilder("test_field_type", LegacySQLTypeName.TIME) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); HashSet compatible = new HashSet<>(Arrays.asList(Int64Type.getDescriptor(), SFixed64Type.getDescriptor())); @@ -906,7 +906,7 @@ public void testBQTimestamp() { Field.newBuilder("test_field_type", LegacySQLTypeName.TIMESTAMP) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); HashSet compatible = new HashSet<>( Arrays.asList( @@ -948,7 +948,7 @@ public void testBQTopLevelMismatch() { Field.newBuilder("test_toplevel_mismatch", LegacySQLTypeName.STRING) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); try { compact.check("projects/p/datasets/d/tables/t", StringType.getDescriptor(), false); } catch (IllegalArgumentException expected) { @@ -977,7 +977,7 @@ public void testBQTopLevelMatch() { Field.newBuilder("match", LegacySQLTypeName.STRING) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); compact.check("projects/p/datasets/d/tables/t", TopLevelMatch.getDescriptor(), false); verify(mockBigquery, 
times(1)).getTable(any(TableId.class)); verify(mockBigqueryTable, times(1)).getDefinition(); @@ -990,7 +990,7 @@ public void testAllowUnknownUnsupportedFields() { Field.newBuilder("string_value", LegacySQLTypeName.STRING) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); compact.check( "projects/p/datasets/d/tables/t", AllowUnknownUnsupportedFields.getDescriptor(), true); verify(mockBigquery, times(1)).getTable(any(TableId.class)); @@ -1004,7 +1004,7 @@ public void testLowerCase() { Field.newBuilder("tEsT_fIeLd_TyPe", LegacySQLTypeName.STRING) .setMode(Field.Mode.NULLABLE) .build())); - SchemaCompact compact = SchemaCompact.getInstance(mockBigquery); + SchemaCompatibility compact = SchemaCompatibility.getInstance(mockBigquery); compact.check("projects/p/datasets/d/tables/t", StringType.getDescriptor(), true); verify(mockBigquery, times(1)).getTable(any(TableId.class)); verify(mockBigqueryTable, times(1)).getDefinition(); diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/WriterCacheTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/WriterCacheTest.java index eb249ddd39..450789da36 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/WriterCacheTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/WriterCacheTest.java @@ -59,7 +59,7 @@ public class WriterCacheTest { private static MockBigQueryWrite mockBigQueryWrite; private static MockServiceHelper serviceHelper; - @Mock private static SchemaCompact mockSchemaCheck; + @Mock private static SchemaCompatibility mockSchemaCheck; private BigQueryWriteClient client; private LocalChannelProvider channelProvider;