From b480de415e53782efb9c67b4a920367075cfaee3 Mon Sep 17 00:00:00 2001
From: Praful Makani
Date: Thu, 30 Jul 2020 21:02:15 +0530
Subject: [PATCH] refactor(samples): load clustered table (#622)

---
 .../example/bigquery/LoadTableClustered.java  | 23 ++++++++-----------
 .../bigquery/LoadTableClusteredIT.java        | 21 +++++++++--------
 2 files changed, 21 insertions(+), 23 deletions(-)

diff --git a/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java b/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java
index a3e024518..ad171eca6 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java
@@ -33,9 +33,10 @@
 import com.google.common.collect.ImmutableList;
 import java.util.List;
 
+// Sample to load clustered table.
 public class LoadTableClustered {
 
-  public static void runLoadTableClustered() throws Exception {
+  public static void runLoadTableClustered() {
     // TODO(developer): Replace these variables before running the sample.
     String datasetName = "MY_DATASET_NAME";
     String tableName = "MY_TABLE_NAME";
@@ -54,8 +55,7 @@ public static void loadTableClustered(
       String tableName,
       String sourceUri,
       Schema schema,
-      List<String> clusteringFields)
-      throws Exception {
+      List<String> clusteringFields) {
     try {
       // Initialize client that will be used to send requests. This client only needs to be created
       // once, and can be reused for multiple requests.
@@ -80,19 +80,16 @@ public static void loadTableClustered(
 
       // Load data from a GCS parquet file into the table
       // Blocks until this load table job completes its execution, either failing or succeeding.
-      Job completedJob = loadJob.waitFor();
+      Job job = loadJob.waitFor();
 
       // Check for errors
-      if (completedJob == null) {
-        throw new Exception("Job not executed since it no longer exists.");
-      } else if (completedJob.getStatus().getError() != null) {
-        // You can also look at queryJob.getStatus().getExecutionErrors() for all
-        // errors, not just the latest one.
-        throw new Exception(
-            "BigQuery was unable to load into the table due to an error: \n"
-                + loadJob.getStatus().getError());
+      if (job.isDone() && job.getStatus().getError() == null) {
+        System.out.println("Data successfully loaded into clustered table during load job");
+      } else {
+        System.out.println(
+            "BigQuery was unable to load into the table due to an error:"
+                + job.getStatus().getError());
       }
-      System.out.println("Data successfully loaded into clustered table during load job");
     } catch (BigQueryException | InterruptedException e) {
       System.out.println("Data not loaded into clustered table during load job \n" + e.toString());
     }
diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java
index 3c05c7285..3567dea9f 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/LoadTableClusteredIT.java
@@ -25,21 +25,26 @@
 import com.google.common.collect.ImmutableList;
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
+import java.util.UUID;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class LoadTableClusteredIT {
+
+  private String tableName;
   private ByteArrayOutputStream bout;
   private PrintStream out;
 
-  private static final String BIGQUERY_DATASET_NAME = System.getenv("BIGQUERY_DATASET_NAME");
+  private static final String BIGQUERY_DATASET_NAME = requireEnvVar("BIGQUERY_DATASET_NAME");
 
-  private static void requireEnvVar(String varName) {
+  private static String requireEnvVar(String varName) {
+    String value = System.getenv(varName);
     assertNotNull(
         "Environment variable " + varName + " is required to perform these tests.",
         System.getenv(varName));
+    return value;
   }
 
   @BeforeClass
@@ -49,6 +54,7 @@ public static void checkRequirements() {
 
   @Before
   public void setUp() {
+    tableName = "LOAD_CLUSTERED_TABLE_TEST_" + UUID.randomUUID().toString().substring(0, 8);
     bout = new ByteArrayOutputStream();
     out = new PrintStream(bout);
     System.setOut(out);
@@ -56,15 +62,14 @@ public void setUp() {
 
   @After
   public void tearDown() {
+    // Clean up
+    DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableName);
     System.setOut(null);
   }
 
   @Test
-  public void loadTableClustered() throws Exception {
+  public void testLoadTableClustered() {
     String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states-by-date-no-header.csv";
-
-    String tableName = "LOAD_CLUSTERED_TABLE_TEST";
-
     Schema schema =
         Schema.of(
            Field.of("name", StandardSQLTypeName.STRING),
@@ -73,11 +78,7 @@ public void loadTableClustered() {
 
    LoadTableClustered.loadTableClustered(
        BIGQUERY_DATASET_NAME, tableName, sourceUri, schema, ImmutableList.of("name", "post_abbr"));
-
    assertThat(bout.toString())
        .contains("Data successfully loaded into clustered table during load job");
-
-    // Clean up
-    DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableName);
  }
 }
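Note on the load configuration: the Clustering and LoadJobConfiguration setup that the sample relies on sits in unchanged context lines the hunks above do not show. The sketch below is a minimal, self-contained approximation of that pattern with the google-cloud-bigquery client, not the patched file itself: the class name, the three-column schema, the CSV format option, and the default-credentials client are assumptions; the dataset and table names are placeholders, and the GCS URI is the public sample file the integration test loads.

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Clustering;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.FormatOptions;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.LoadJobConfiguration;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.cloud.bigquery.TableId;
import com.google.common.collect.ImmutableList;

// Hypothetical sketch class, not part of the patch above.
public class LoadClusteredSketch {

  public static void main(String[] args) throws InterruptedException {
    // Placeholder names; replace with a real dataset and table.
    String datasetName = "MY_DATASET_NAME";
    String tableName = "MY_TABLE_NAME";
    // Public sample CSV, the same file the integration test loads.
    String sourceUri =
        "gs://cloud-samples-data/bigquery/us-states/us-states-by-date-no-header.csv";

    // Client using application-default credentials.
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

    // Assumed columns of the sample CSV: name, post_abbr, date.
    Schema schema =
        Schema.of(
            Field.of("name", StandardSQLTypeName.STRING),
            Field.of("post_abbr", StandardSQLTypeName.STRING),
            Field.of("date", StandardSQLTypeName.DATE));

    // Cluster the destination table by the given columns.
    Clustering clustering =
        Clustering.newBuilder().setFields(ImmutableList.of("name", "post_abbr")).build();

    LoadJobConfiguration loadJobConfig =
        LoadJobConfiguration.newBuilder(TableId.of(datasetName, tableName), sourceUri)
            .setFormatOptions(FormatOptions.csv())
            .setSchema(schema)
            .setClustering(clustering)
            .build();

    // Start the load job and block until it finishes, then report the outcome
    // the same way the refactored sample does.
    Job job = bigquery.create(JobInfo.of(loadJobConfig)).waitFor();
    if (job.isDone() && job.getStatus().getError() == null) {
      System.out.println("Data successfully loaded into clustered table during load job");
    } else {
      System.out.println(
          "BigQuery was unable to load into the table due to an error:"
              + job.getStatus().getError());
    }
  }
}

Attaching the Clustering spec through LoadJobConfiguration.Builder#setClustering is what makes the load job create the destination table already clustered by the listed columns, which is the behavior the integration test exercises.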