docs(samples): Convert tightly coupled, option-related local variables inside methods into method arguments (#393)

In the current version, several option values related to the arguments are written inside the sample methods instead of being passed in as arguments. Since these options are tightly coupled with the arguments, I think it is better to pass them in as well.

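The pattern, as a minimal before/after sketch (the names and the Schema value mirror the CreatePartitionedTable diff below; the method bodies are elided):

// Before: the option is a tightly coupled local variable inside the sample method.
public static void createPartitionedTable(String datasetName, String tableName) {
  Schema schema =
      Schema.of(
          Field.of("stringField", StandardSQLTypeName.STRING),
          Field.of("booleanField", StandardSQLTypeName.BOOL),
          Field.of("dateField", StandardSQLTypeName.DATE));
  // ... create the partitioned table using schema ...
}

// After: the caller builds the option and passes it in as a method argument.
public static void createPartitionedTable(String datasetName, String tableName, Schema schema) {
  // ... create the partitioned table using schema ...
}
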
Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly:
- [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/java-bigquery/issues/new/choose) before writing your code!  That way we can discuss the change, evaluate designs, and agree on the general idea
- [x] Ensure the tests and linter pass
- [x] Code coverage does not decrease (if any source code was changed)
- [x] Appropriate docs were updated (if necessary)

Fixes #394 ☕️
irvifa committed May 28, 2020
1 parent 3f13ccb commit a11f3cb
Showing 6 changed files with 37 additions and 23 deletions.
@@ -34,10 +34,15 @@ public static void runCreatePartitionedTable() {
     // TODO(developer): Replace these variables before running the sample.
     String datasetName = "MY_DATASET_NAME";
     String tableName = "MY_TABLE_NAME";
-    createPartitionedTable(datasetName, tableName);
+    Schema schema =
+        Schema.of(
+            Field.of("stringField", StandardSQLTypeName.STRING),
+            Field.of("booleanField", StandardSQLTypeName.BOOL),
+            Field.of("dateField", StandardSQLTypeName.DATE));
+    createPartitionedTable(datasetName, tableName, schema);
   }
 
-  public static void createPartitionedTable(String datasetName, String tableName) {
+  public static void createPartitionedTable(String datasetName, String tableName, Schema schema) {
     try {
       // Initialize client that will be used to send requests. This client only needs to be created
       // once, and can be reused for multiple requests.
@@ -47,12 +52,6 @@ public static void createPartitionedTable(String datasetName, String tableName)
 
       TimePartitioning partitioning = TimePartitioning.of(TimePartitioning.Type.DAY);
 
-      Schema schema =
-          Schema.of(
-              Field.of("stringField", StandardSQLTypeName.STRING),
-              Field.of("booleanField", StandardSQLTypeName.BOOL),
-              Field.of("dateField", StandardSQLTypeName.DATE));
-
       StandardTableDefinition tableDefinition =
           StandardTableDefinition.newBuilder()
               .setSchema(schema)
@@ -35,12 +35,19 @@ public static void runExtractTableToJson() {
     String tableName = "shakespeare";
     String bucketName = "my-bucket";
     String destinationUri = "gs://" + bucketName + "/path/to/file";
-    extractTableToJson(projectId, datasetName, tableName, destinationUri);
+    // For more information on export formats available see:
+    // https://cloud.google.com/bigquery/docs/exporting-data#export_formats_and_compression_types
+    // For more information on Job see:
+    // https://googleapis.dev/java/google-cloud-clients/latest/index.html?com/google/cloud/bigquery/package-summary.html
+
+    String dataFormat = "CSV";
+    extractTableToJson(projectId, datasetName, tableName, destinationUri, dataFormat);
   }
 
   // Exports datasetName:tableName to destinationUri as raw CSV
   public static void extractTableToJson(
-      String projectId, String datasetName, String tableName, String destinationUri) {
+      String projectId, String datasetName, String tableName, String destinationUri,
+      String dataFormat) {
     try {
       // Initialize client that will be used to send requests. This client only needs to be created
       // once, and can be reused for multiple requests.
@@ -49,11 +56,7 @@ public static void extractTableToJson(
       TableId tableId = TableId.of(projectId, datasetName, tableName);
       Table table = bigquery.getTable(tableId);
 
-      // For more information on export formats available see:
-      // https://cloud.google.com/bigquery/docs/exporting-data#export_formats_and_compression_types
-      // For more information on Job see:
-      // https://googleapis.dev/java/google-cloud-clients/latest/index.html?com/google/cloud/bigquery/package-summary.html
-      Job job = table.extract("CSV", destinationUri);
+      Job job = table.extract(dataFormat, destinationUri);
 
       // Blocks until this job completes its execution, either failing or succeeding.
       Job completedJob =
@@ -68,7 +71,7 @@ public static void extractTableToJson(
             "BigQuery was unable to extract due to an error: \n" + job.getStatus().getError());
         return;
       }
-      System.out.println("Table export successful. Check in GCS bucket for the CSV file.");
+      System.out.println("Table export successful. Check in GCS bucket for the " + dataFormat + " file.");
     } catch (BigQueryException | InterruptedException e) {
       System.out.println("Table extraction job was interrupted. \n" + e.toString());
     }
@@ -41,10 +41,11 @@ public static void runLoadLocalFile() throws IOException, InterruptedException {
     String datasetName = "MY_DATASET_NAME";
     String tableName = "MY_TABLE_NAME";
     Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv");
-    loadLocalFile(datasetName, tableName, csvPath);
+    loadLocalFile(datasetName, tableName, csvPath, FormatOptions.csv());
   }
 
-  public static void loadLocalFile(String datasetName, String tableName, Path csvPath)
+  public static void loadLocalFile(String datasetName, String tableName, Path csvPath,
+      FormatOptions formatOptions)
       throws IOException, InterruptedException {
     try {
       // Initialize client that will be used to send requests. This client only needs to be created
@@ -54,7 +55,7 @@ public static void loadLocalFile(String datasetName, String tableName, Path csvP
 
       WriteChannelConfiguration writeChannelConfiguration =
           WriteChannelConfiguration.newBuilder(tableId)
-              .setFormatOptions(FormatOptions.csv())
+              .setFormatOptions(formatOptions)
               .build();
 
       // The location and JobName must be specified; other fields can be auto-detected.
@@ -19,6 +19,9 @@
 import static com.google.common.truth.Truth.assertThat;
 import static junit.framework.TestCase.assertNotNull;
 
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
 import org.junit.After;
@@ -58,8 +61,13 @@ public void tearDown() {
   @Test
   public void testCreatePartitionedTable() {
     String tableName = "MY_PARTITIONED_TABLE";
+    Schema schema =
+        Schema.of(
+            Field.of("stringField", StandardSQLTypeName.STRING),
+            Field.of("booleanField", StandardSQLTypeName.BOOL),
+            Field.of("dateField", StandardSQLTypeName.DATE));
 
-    CreatePartitionedTable.createPartitionedTable(BIGQUERY_DATASET_NAME, tableName);
+    CreatePartitionedTable.createPartitionedTable(BIGQUERY_DATASET_NAME, tableName, schema);
 
     assertThat(bout.toString()).contains("Partitioned table created successfully");
 
@@ -61,10 +61,12 @@ public void testExtractTableToJson() {
     String datasetName = "samples";
     String tableName = "shakespeare";
     String destinationUri = "gs://" + GCS_BUCKET + "/extractTest.csv";
+    String dataFormat = "CSV";
 
     // Extract table content to GCS in CSV format
-    ExtractTableToJson.extractTableToJson(projectId, datasetName, tableName, destinationUri);
+    ExtractTableToJson.extractTableToJson(projectId, datasetName, tableName, destinationUri,
+        dataFormat);
     assertThat(bout.toString())
-        .contains("Table export successful. Check in GCS bucket for the CSV file.");
+        .contains("Table export successful. Check in GCS bucket for the " + dataFormat + " file.");
   }
 }
@@ -20,6 +20,7 @@
 import static junit.framework.TestCase.assertNotNull;
 
 import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FormatOptions;
 import com.google.cloud.bigquery.LegacySQLTypeName;
 import com.google.cloud.bigquery.Schema;
 import java.io.ByteArrayOutputStream;
@@ -76,7 +77,7 @@ public void loadLocalFile() throws IOException, InterruptedException {
 
     Path csvPath = FileSystems.getDefault().getPath("src/test/resources", "bigquery_noheader.csv");
 
-    LoadLocalFile.loadLocalFile(BIGQUERY_DATASET_NAME, tableName, csvPath);
+    LoadLocalFile.loadLocalFile(BIGQUERY_DATASET_NAME, tableName, csvPath, FormatOptions.csv());
 
     assertThat(bout.toString()).contains("Successfully loaded");
 

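Because the export format and load options are now parameters, the same samples can be reused with other settings. A usage sketch under stated assumptions (the bucket and object path are placeholders; NEWLINE_DELIMITED_JSON is one of BigQuery's documented export formats; the other values mirror the tests above):

// Export the samples.shakespeare table as newline-delimited JSON instead of CSV.
ExtractTableToJson.extractTableToJson(
    projectId, "samples", "shakespeare", "gs://my-bucket/extract.json", "NEWLINE_DELIMITED_JSON");

// Load a local CSV file, passing the format explicitly.
LoadLocalFile.loadLocalFile(datasetName, tableName, csvPath, FormatOptions.csv());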