
docs(samples): Convert tightly coupled local variable inside of method into method arguments #386

Merged
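Every diff below applies the same refactoring: a value that was hard-coded as a local variable inside a sample method becomes a method argument, with the default moved up into the runner method, so tests and other callers can supply their own value. A minimal sketch of the pattern (the class, method, and variable names here are illustrative, not taken from the PR):

// Before: the value is trapped inside the method and callers cannot vary it.
public static void updateThing(String thingName) {
  long timeoutMs = 5_000L; // hard-coded
  // ... use timeoutMs ...
}

// After: the runner supplies the default and the method takes it as an argument.
public static void runUpdateThing() {
  long timeoutMs = 5_000L; // default now lives in the runner
  updateThing("MY_THING_NAME", timeoutMs);
}

public static void updateThing(String thingName, long timeoutMs) {
  // ... use timeoutMs ...
}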
@@ -32,6 +32,8 @@
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.TableId;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

public class AddColumnLoadAppend {
@@ -41,31 +43,35 @@ public static void runAddColumnLoadAppend() throws Exception {
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
String sourceUri = "/path/to/file.csv";
addColumnLoadAppend(datasetName, tableName, sourceUri);
// Add a new column to a BigQuery table while appending rows via a load job.
// 'REQUIRED' fields cannot be added to an existing schema, so the additional column must be
// 'NULLABLE'.
Schema schema =
Schema.of(
Field.newBuilder("name", LegacySQLTypeName.STRING)
.setMode(Field.Mode.REQUIRED)
.build());

List<Field> fields = schema.getFields();
// Additional column to add during the load job
Field newField = Field.newBuilder("post_abbr", LegacySQLTypeName.STRING)
.setMode(Field.Mode.NULLABLE)
.build();
List<Field> newFields = new ArrayList<>(fields);
newFields.add(newField);
Schema newSchema = Schema.of(newFields);
addColumnLoadAppend(datasetName, tableName, sourceUri, newSchema);
}

public static void addColumnLoadAppend(String datasetName, String tableName, String sourceUri)
throws Exception {
public static void addColumnLoadAppend(String datasetName, String tableName,
String sourceUri, Schema newSchema) throws Exception {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

TableId tableId = TableId.of(datasetName, tableName);

// Add a new column to a BigQuery table while appending rows via a load job.
// 'REQUIRED' fields cannot be added to an existing schema, so the additional column must be
// 'NULLABLE'.
Schema newSchema =
Schema.of(
Field.newBuilder("name", LegacySQLTypeName.STRING)
.setMode(Field.Mode.REQUIRED)
.build(),
// Adding below additional column during the load job
Field.newBuilder("post_abbr", LegacySQLTypeName.STRING)
.setMode(Field.Mode.NULLABLE)
.build());

LoadJobConfiguration loadJobConfig =
LoadJobConfiguration.builder(tableId, sourceUri)
.setFormatOptions(FormatOptions.csv())
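With the schema now passed in, a caller can append a column of its own choosing without editing the sample. A sketch under that assumption (the "state_code" column and the paths are illustrative, and the demo class is assumed to live alongside the samples):

import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.LegacySQLTypeName;
import com.google.cloud.bigquery.Schema;

public class AddColumnLoadAppendDemo {
  public static void main(String[] args) throws Exception {
    // The appended column must be NULLABLE; REQUIRED columns cannot be added to an existing schema.
    Schema newSchema =
        Schema.of(
            Field.newBuilder("name", LegacySQLTypeName.STRING)
                .setMode(Field.Mode.REQUIRED)
                .build(),
            Field.newBuilder("state_code", LegacySQLTypeName.STRING) // illustrative column
                .setMode(Field.Mode.NULLABLE)
                .build());
    AddColumnLoadAppend.addColumnLoadAppend(
        "MY_DATASET_NAME", "MY_TABLE_NAME", "/path/to/file.csv", newSchema);
  }
}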
@@ -32,17 +32,18 @@ public class LoadParquet {
public static void runLoadParquet() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
loadParquet(datasetName);
String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
String tableName = "us_states";
loadParquet(datasetName, tableName, sourceUri);
}

public static void loadParquet(String datasetName) {
public static void loadParquet(String datasetName, String tableName, String sourceUri) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
TableId tableId = TableId.of(datasetName, "us_states");
TableId tableId = TableId.of(datasetName, tableName);

LoadJobConfiguration configuration =
LoadJobConfiguration.builder(tableId, sourceUri)
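Because the table name and source URI are now arguments, the same method can load any Parquet file into any table. A short sketch (the bucket and table below are hypothetical):

public class LoadParquetDemo {
  public static void main(String[] args) {
    // Hypothetical GCS object and destination table.
    String sourceUri = "gs://my-bucket/exports/cities.parquet";
    LoadParquet.loadParquet("MY_DATASET_NAME", "cities", sourceUri);
  }
}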
@@ -34,10 +34,13 @@ public class LoadParquetReplaceTable {
public static void runLoadParquetReplaceTable() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
loadParquetReplaceTable(datasetName);
String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
String tableName = "us_states";
loadParquetReplaceTable(datasetName, tableName, sourceUri);
}

public static void loadParquetReplaceTable(String datasetName) {
public static void loadParquetReplaceTable(String datasetName, String tableName,
String sourceUri) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
@@ -46,8 +49,7 @@ public static void loadParquetReplaceTable(String datasetName) {
// Imports a GCS file into a table and overwrites table data if table already exists.
// This sample loads a Parquet file at:
// https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.parquet
String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
TableId tableId = TableId.of(datasetName, "us_states");
TableId tableId = TableId.of(datasetName, tableName);

// For more information on LoadJobConfiguration see:
// https://googleapis.dev/java/google-cloud-clients/latest/com/google/cloud/bigquery/LoadJobConfiguration.Builder.html
@@ -34,10 +34,16 @@ public static void runTableInsertRows() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
tableInsertRows(datasetName, tableName);
// Create a row to insert
Map<String, Object> rowContent = new HashMap<>();
rowContent.put("booleanField", true);
rowContent.put("numericField", "3.14");

tableInsertRows(datasetName, tableName, rowContent);
}

public static void tableInsertRows(String datasetName, String tableName) {
public static void tableInsertRows(String datasetName, String tableName,
Map<String, Object> rowContent) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
@@ -46,11 +52,6 @@ public static void tableInsertRows(String datasetName, String tableName) {
// Get table
TableId tableId = TableId.of(datasetName, tableName);

// Create a row to insert
Map<String, Object> rowContent = new HashMap<>();
rowContent.put("booleanField", true);
rowContent.put("numericField", "3.14");

// Inserts rowContent into datasetName:tableId.
InsertAllResponse response =
bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow(rowContent).build());
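With the row content supplied by the caller, a test can insert whatever fields it needs. A sketch, assuming the destination table's schema has these two columns:

import java.util.HashMap;
import java.util.Map;

public class TableInsertRowsDemo {
  public static void main(String[] args) {
    // Keys must match column names in the table's schema.
    Map<String, Object> rowContent = new HashMap<>();
    rowContent.put("booleanField", false);
    rowContent.put("numericField", "2.71"); // NUMERIC values are safest passed as strings
    TableInsertRows.tableInsertRows("MY_DATASET_NAME", "MY_TABLE_NAME", rowContent);
  }
}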
@@ -31,22 +31,22 @@ public class UpdateDatasetAccess {
public static void runUpdateDatasetAccess() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
updateDatasetAccess(datasetName);
// Create a new ACL granting the READER role to "sample.bigquery.dev@gmail.com"
// For more information on the types of ACLs available see:
// https://cloud.google.com/storage/docs/access-control/lists
Acl newEntry = Acl.of(new User("sample.bigquery.dev@gmail.com"), Role.READER);

updateDatasetAccess(datasetName, newEntry);
}

public static void updateDatasetAccess(String datasetName) {
public static void updateDatasetAccess(String datasetName, Acl newEntry) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

Dataset dataset = bigquery.getDataset(datasetName);

// Create a new ACL granting the READER role to "sample.bigquery.dev@gmail.com"
// For more information on the types of ACLs available see:
// https://cloud.google.com/storage/docs/access-control/lists
Acl newEntry = Acl.of(new User("sample.bigquery.dev@gmail.com"), Role.READER);

// Get a copy of the ACLs list from the dataset and append the new entry
ArrayList<Acl> acls = new ArrayList<>(dataset.getAcl());
acls.add(newEntry);
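Since the Acl entry is now a parameter, callers are no longer limited to the READER grant that was baked into the old method body. A sketch granting WRITER to a hypothetical Google group:

import com.google.cloud.bigquery.Acl;
import com.google.cloud.bigquery.Acl.Group;
import com.google.cloud.bigquery.Acl.Role;

public class UpdateDatasetAccessDemo {
  public static void main(String[] args) {
    // Grant WRITER to a group instead of READER to a user.
    Acl writerEntry = Acl.of(new Group("example-team@googlegroups.com"), Role.WRITER);
    UpdateDatasetAccess.updateDatasetAccess("MY_DATASET_NAME", writerEntry);
  }
}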
@@ -28,18 +28,17 @@ public class UpdateDatasetExpiration {
public static void runUpdateDatasetExpiration() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
updateDatasetExpiration(datasetName);
// Set the dataset's default table lifetime to one day (in milliseconds)
Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
updateDatasetExpiration(datasetName, newExpiration);
}

public static void updateDatasetExpiration(String datasetName) {
public static void updateDatasetExpiration(String datasetName, Long newExpiration) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

// Update dataset expiration to one day
Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);

Dataset dataset = bigquery.getDataset(datasetName);
bigquery.update(dataset.toBuilder().setDefaultTableLifetime(newExpiration).build());
System.out.println("Dataset description updated successfully to " + newExpiration);
@@ -29,18 +29,18 @@ public static void runUpdateTableExpiration() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
updateTableExpiration(datasetName, tableName);
// Update table expiration to one day from now.
// Note: setExpirationTime expects an absolute time in milliseconds since the epoch.
Long newExpiration = System.currentTimeMillis() + TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
updateTableExpiration(datasetName, tableName, newExpiration);
}

public static void updateTableExpiration(String datasetName, String tableName) {
public static void updateTableExpiration(String datasetName, String tableName,
Long newExpiration) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();

// Update table expiration to one day
Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);

Table table = bigquery.getTable(datasetName, tableName);
bigquery.update(table.toBuilder().setExpirationTime(newExpiration).build());

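The expiration is likewise caller-controlled now. Because Table.Builder.setExpirationTime takes an absolute timestamp in epoch milliseconds, the duration has to be added to the current time. A sketch with a seven-day expiration:

import java.util.concurrent.TimeUnit;

public class UpdateTableExpirationDemo {
  public static void main(String[] args) {
    // Expire seven days from now; setExpirationTime expects epoch millis, not a duration.
    Long newExpiration =
        System.currentTimeMillis() + TimeUnit.MILLISECONDS.convert(7, TimeUnit.DAYS);
    UpdateTableExpiration.updateTableExpiration("MY_DATASET_NAME", "MY_TABLE_NAME", newExpiration);
  }
}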
@@ -24,6 +24,8 @@
import com.google.cloud.bigquery.Schema;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -71,7 +73,16 @@ public void testAddColumnLoadAppend() throws Exception {

CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, originalSchema);

AddColumnLoadAppend.addColumnLoadAppend(BIGQUERY_DATASET_NAME, tableName, sourceUri);
List<Field> fields = originalSchema.getFields();
// Additional column to add during the load job
Field newField = Field.newBuilder("post_abbr", LegacySQLTypeName.STRING)
.setMode(Field.Mode.NULLABLE)
.build();
List<Field> newFields = new ArrayList<>(fields);
newFields.add(newField);
Schema newSchema = Schema.of(newFields);

AddColumnLoadAppend.addColumnLoadAppend(BIGQUERY_DATASET_NAME, tableName, sourceUri, newSchema);

assertThat(bout.toString()).contains("Column successfully added during load append job");

@@ -57,7 +57,9 @@ public void tearDown() {

@Test
public void loadParquet() {
LoadParquet.loadParquet(BIGQUERY_DATASET_NAME);
String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
String tableName = "us_states";
LoadParquet.loadParquet(BIGQUERY_DATASET_NAME, tableName, sourceUri);
assertThat(bout.toString()).contains("GCS parquet loaded successfully.");
}
}
@@ -57,7 +57,9 @@ public void tearDown() {

@Test
public void testLoadParquetReplaceTable() {
LoadParquetReplaceTable.loadParquetReplaceTable(BIGQUERY_DATASET_NAME);
String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
String tableName = "us_states";
LoadParquetReplaceTable.loadParquetReplaceTable(BIGQUERY_DATASET_NAME, tableName, sourceUri);
assertThat(bout.toString()).contains("GCS parquet overwrote existing table successfully.");
}
}
@@ -24,6 +24,8 @@
import com.google.cloud.bigquery.Schema;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.junit.After;
import org.junit.Before;
@@ -70,8 +72,13 @@ public void testTableInsertRows() {
// Create table in dataset for testing
CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, schema);

// Create a row to insert
Map<String, Object> rowContent = new HashMap<>();
rowContent.put("booleanField", true);
rowContent.put("numericField", "3.14");

// Testing
TableInsertRows.tableInsertRows(BIGQUERY_DATASET_NAME, tableName);
TableInsertRows.tableInsertRows(BIGQUERY_DATASET_NAME, tableName, rowContent);
assertThat(bout.toString()).contains("Rows successfully inserted into table");

// Clean up
@@ -19,6 +19,9 @@
import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;

import com.google.cloud.bigquery.Acl;
import com.google.cloud.bigquery.Acl.Role;
import com.google.cloud.bigquery.Acl.User;
import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
@@ -62,8 +65,9 @@ public void updateDatasetAccess() {
// Create a dataset in order to modify its ACL
CreateDataset.createDataset(generatedDatasetName);

Acl newEntry = Acl.of(new User("sample.bigquery.dev@gmail.com"), Role.READER);
// Modify dataset's ACL
UpdateDatasetAccess.updateDatasetAccess(generatedDatasetName);
UpdateDatasetAccess.updateDatasetAccess(generatedDatasetName, newEntry);
assertThat(bout.toString()).contains("Dataset Access Control updated successfully");

// Clean up
@@ -22,6 +22,7 @@
import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -62,8 +63,9 @@ public void updateDatasetExpiration() {
// Create a dataset in order to modify its expiration
CreateDataset.createDataset(generatedDatasetName);

Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
// Modify dataset's expiration
UpdateDatasetExpiration.updateDatasetExpiration(generatedDatasetName);
UpdateDatasetExpiration.updateDatasetExpiration(generatedDatasetName, newExpiration);
assertThat(bout.toString()).contains("Dataset description updated successfully");

// Clean up
@@ -21,6 +21,7 @@

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.concurrent.TimeUnit;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -59,7 +60,8 @@ public void tearDown() {
public void updateTableExpiration() {
String tableName = "update_expiration_table";
CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, null);
UpdateTableExpiration.updateTableExpiration(BIGQUERY_DATASET_NAME, tableName);
Long newExpiration = System.currentTimeMillis() + TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS);
UpdateTableExpiration.updateTableExpiration(BIGQUERY_DATASET_NAME, tableName, newExpiration);
assertThat(bout.toString()).contains("Table expiration updated successfully");

// Clean up