docs(samples): Convert tightly coupled local-variable options inside methods into method arguments #393

Merged
CreatePartitionedTable.java

@@ -34,10 +34,15 @@ public static void runCreatePartitionedTable() {
     // TODO(developer): Replace these variables before running the sample.
     String datasetName = "MY_DATASET_NAME";
     String tableName = "MY_TABLE_NAME";
-    createPartitionedTable(datasetName, tableName);
+    Schema schema =
+        Schema.of(
+            Field.of("stringField", StandardSQLTypeName.STRING),
+            Field.of("booleanField", StandardSQLTypeName.BOOL),
+            Field.of("dateField", StandardSQLTypeName.DATE));
+    createPartitionedTable(datasetName, tableName, schema);
   }

-  public static void createPartitionedTable(String datasetName, String tableName) {
+  public static void createPartitionedTable(String datasetName, String tableName, Schema schema) {
     try {
       // Initialize client that will be used to send requests. This client only needs to be created
       // once, and can be reused for multiple requests.
@@ -47,12 +52,6 @@ public static void createPartitionedTable(String datasetName, String tableName)

       TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY);

-      Schema schema =
-          Schema.of(
-              Field.of("stringField", StandardSQLTypeName.STRING),
-              Field.of("booleanField", StandardSQLTypeName.BOOL),
-              Field.of("dateField", StandardSQLTypeName.DATE));
-
       StandardTableDefinition tableDefinition =
           StandardTableDefinition.newBuilder()
               .setSchema(schema)
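With the schema lifted into a parameter, callers can now create partitioned tables with any layout, not just the hard-coded three-field one. A minimal sketch of a hypothetical caller (the two-field schema and table name below are illustrative, not part of this PR):

    import com.google.cloud.bigquery.Field;
    import com.google.cloud.bigquery.Schema;
    import com.google.cloud.bigquery.StandardSQLTypeName;

    // Hypothetical caller: reuse the sample with a different schema.
    Schema eventSchema =
        Schema.of(
            Field.of("eventId", StandardSQLTypeName.STRING),
            Field.of("eventDate", StandardSQLTypeName.DATE));
    CreatePartitionedTable.createPartitionedTable("MY_DATASET_NAME", "MY_EVENTS_TABLE", eventSchema);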
ExtractTableToJson.java

@@ -35,12 +35,19 @@ public static void runExtractTableToJson() {
     String tableName = "shakespeare";
     String bucketName = "my-bucket";
    String destinationUri = "gs://" + bucketName + "/path/to/file";
-    extractTableToJson(projectId, datasetName, tableName, destinationUri);
+    // For more information on export formats available see:
+    // https://cloud.google.com/bigquery/docs/exporting-data#export_formats_and_compression_types
+    // For more information on Job see:
+    // https://googleapis.dev/java/google-cloud-clients/latest/index.html?com/google/cloud/bigquery/package-summary.html
+
+    String dataFormat = "CSV";
+    extractTableToJson(projectId, datasetName, tableName, destinationUri, dataFormat);
   }

   // Exports datasetName:tableName to destinationUri as raw CSV
   public static void extractTableToJson(
-      String projectId, String datasetName, String tableName, String destinationUri) {
+      String projectId, String datasetName, String tableName, String destinationUri,
+      String dataFormat) {
     try {
       // Initialize client that will be used to send requests. This client only needs to be created
       // once, and can be reused for multiple requests.
@@ -49,11 +56,7 @@ public static void extractTableToJson(
       TableId tableId = TableId.of(projectId, datasetName, tableName);
      Table table = bigquery.getTable(tableId);

-      // For more information on export formats available see:
-      // https://cloud.google.com/bigquery/docs/exporting-data#export_formats_and_compression_types
-      // For more information on Job see:
-      // https://googleapis.dev/java/google-cloud-clients/latest/index.html?com/google/cloud/bigquery/package-summary.html
-      Job job = table.extract("CSV", destinationUri);
+      Job job = table.extract(dataFormat, destinationUri);

       // Blocks until this job completes its execution, either failing or succeeding.
       Job completedJob =
@@ -68,7 +71,7 @@ public static void extractTableToJson(
            "BigQuery was unable to extract due to an error: \n" + job.getStatus().getError());
        return;
      }
-      System.out.println("Table export successful. Check in GCS bucket for the CSV file.");
+      System.out.println("Table export successful. Check in GCS bucket for the " + dataFormat + " file.");
    } catch (BigQueryException | InterruptedException e) {
      System.out.println("Table extraction job was interrupted. \n" + e.toString());
    }
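Because the export format is now an argument instead of the hard-coded "CSV", the same method can emit any format BigQuery extract jobs support. A hedged sketch using the documented format name NEWLINE_DELIMITED_JSON (the project, dataset, table, and bucket names are placeholders):

    // Hypothetical caller: export the table as newline-delimited JSON.
    String dataFormat = "NEWLINE_DELIMITED_JSON";
    ExtractTableToJson.extractTableToJson(
        "MY_PROJECT_ID", "MY_DATASET_NAME", "MY_TABLE_NAME",
        "gs://my-bucket/path/to/file.json", dataFormat);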
LoadLocalFile.java

@@ -41,10 +41,11 @@ public static void runLoadLocalFile() throws IOException, InterruptedException {
     String datasetName = "MY_DATASET_NAME";
     String tableName = "MY_TABLE_NAME";
     Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv");
-    loadLocalFile(datasetName, tableName, csvPath);
+    loadLocalFile(datasetName, tableName, csvPath, FormatOptions.csv());
   }

-  public static void loadLocalFile(String datasetName, String tableName, Path csvPath)
+  public static void loadLocalFile(String datasetName, String tableName, Path csvPath,
+      FormatOptions formatOptions)
       throws IOException, InterruptedException {
     try {
       // Initialize client that will be used to send requests. This client only needs to be created
@@ -54,7 +55,7 @@ public static void loadLocalFile(String datasetName, String tableName, Path csvPath

       WriteChannelConfiguration writeChannelConfiguration =
           WriteChannelConfiguration.newBuilder(tableId)
-              .setFormatOptions(FormatOptions.csv())
+              .setFormatOptions(formatOptions)
               .build();

       // The location and JobName must be specified; other fields can be auto-detected.
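Exposing FormatOptions as a parameter means the loader is no longer CSV-only. A minimal sketch, assuming a newline-delimited JSON input file; FormatOptions.json() is the client library's factory for that format, and the file name is a placeholder:

    import com.google.cloud.bigquery.FormatOptions;
    import java.nio.file.FileSystems;
    import java.nio.file.Path;

    // Hypothetical caller: load a newline-delimited JSON file instead of CSV.
    Path jsonPath = FileSystems.getDefault().getPath(".", "my-data.json");
    LoadLocalFile.loadLocalFile("MY_DATASET_NAME", "MY_TABLE_NAME", jsonPath, FormatOptions.json());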
CreatePartitionedTableIT.java

@@ -19,6 +19,9 @@
 import static com.google.common.truth.Truth.assertThat;
 import static junit.framework.TestCase.assertNotNull;

+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
 import org.junit.After;
@@ -58,8 +61,13 @@ public void tearDown() {
   @Test
   public void testCreatePartitionedTable() {
     String tableName = "MY_PARTITIONED_TABLE";
+    Schema schema =
+        Schema.of(
+            Field.of("stringField", StandardSQLTypeName.STRING),
+            Field.of("booleanField", StandardSQLTypeName.BOOL),
+            Field.of("dateField", StandardSQLTypeName.DATE));

-    CreatePartitionedTable.createPartitionedTable(BIGQUERY_DATASET_NAME, tableName);
+    CreatePartitionedTable.createPartitionedTable(BIGQUERY_DATASET_NAME, tableName, schema);

     assertThat(bout.toString()).contains("Partitioned table created successfully");
ExtractTableToJsonIT.java

@@ -61,10 +61,12 @@ public void testExtractTableToJson() {
     String datasetName = "samples";
     String tableName = "shakespeare";
     String destinationUri = "gs://" + GCS_BUCKET + "/extractTest.csv";
+    String dataFormat = "CSV";

     // Extract table content to GCS in CSV format
-    ExtractTableToJson.extractTableToJson(projectId, datasetName, tableName, destinationUri);
+    ExtractTableToJson.extractTableToJson(projectId, datasetName, tableName, destinationUri,
+        dataFormat);
     assertThat(bout.toString())
-        .contains("Table export successful. Check in GCS bucket for the CSV file.");
+        .contains("Table export successful. Check in GCS bucket for the " + dataFormat + " file.");
   }
 }
LoadLocalFileIT.java

@@ -20,6 +20,7 @@
 import static junit.framework.TestCase.assertNotNull;

 import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FormatOptions;
 import com.google.cloud.bigquery.LegacySQLTypeName;
 import com.google.cloud.bigquery.Schema;
 import java.io.ByteArrayOutputStream;
@@ -76,7 +77,7 @@ public void loadLocalFile() throws IOException, InterruptedException {

     Path csvPath = FileSystems.getDefault().getPath("src/test/resources", "bigquery_noheader.csv");

-    LoadLocalFile.loadLocalFile(BIGQUERY_DATASET_NAME, tableName, csvPath);
+    LoadLocalFile.loadLocalFile(BIGQUERY_DATASET_NAME, tableName, csvPath, FormatOptions.csv());

     assertThat(bout.toString()).contains("Successfully loaded");