diff --git a/google-cloud-bigquery/clirr-ignored-differences.xml b/google-cloud-bigquery/clirr-ignored-differences.xml new file mode 100644 index 000000000..bc998edc3 --- /dev/null +++ b/google-cloud-bigquery/clirr-ignored-differences.xml @@ -0,0 +1,35 @@ + + + + + + 7012 + com/google/cloud/bigquery/BigQuery + com.google.cloud.Policy getIamPolicy(com.google.cloud.bigquery.TableId, com.google.cloud.bigquery.BigQuery$IAMOption[]) + + + 7012 + com/google/cloud/bigquery/BigQuery + com.google.cloud.Policy setIamPolicy(com.google.cloud.bigquery.TableId, com.google.cloud.Policy, com.google.cloud.bigquery.BigQuery$IAMOption[]) + + + 7012 + com/google/cloud/bigquery/BigQuery + java.util.List testIamPermissions(com.google.cloud.bigquery.TableId, java.util.List, com.google.cloud.bigquery.BigQuery$IAMOption[]) + + + 7012 + com/google/cloud/bigquery/spi/v2/BigQueryRpc + com.google.api.services.bigquery.model.Policy getIamPolicy(java.lang.String, java.util.Map) + + + 7012 + com/google/cloud/bigquery/spi/v2/BigQueryRpc + com.google.api.services.bigquery.model.Policy setIamPolicy(java.lang.String, com.google.api.services.bigquery.model.Policy, java.util.Map) + + + 7012 + com/google/cloud/bigquery/spi/v2/BigQueryRpc + com.google.api.services.bigquery.model.TestIamPermissionsResponse testIamPermissions(java.lang.String, java.util.List, java.util.Map) + + diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java index 4da7fe5e6..b55bae228 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java @@ -393,6 +393,7 @@ Access toPb() { */ public static final class IamMember extends Entity { + private static final long serialVersionUID = 3562909264454016939L; private final String iamMember; /** Creates a iamMember entity given the iamMember. 
*/ diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java index 5ece9cb0e..70a29fd9f 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java @@ -22,6 +22,7 @@ import com.google.api.gax.paging.Page; import com.google.cloud.FieldSelector; import com.google.cloud.FieldSelector.Helper; +import com.google.cloud.Policy; import com.google.cloud.RetryOption; import com.google.cloud.Service; import com.google.cloud.bigquery.spi.v2.BigQueryRpc; @@ -42,7 +43,7 @@ public interface BigQuery extends Service { /** * Fields of a BigQuery Dataset resource. * - * @see Dataset + * @see Dataset * Resource */ enum DatasetField implements FieldSelector { @@ -77,7 +78,7 @@ public String getSelector() { /** * Fields of a BigQuery Table resource. * - * @see Table + * @see Table * Resource */ enum TableField implements FieldSelector { @@ -121,7 +122,7 @@ public String getSelector() { /** * Fields of a BigQuery Model resource. * - * @see Model + * @see Model * Resource */ enum ModelField implements FieldSelector { @@ -156,7 +157,7 @@ public String getSelector() { /** * Fields of a BigQuery Routine resource. * - * @see Routine + * @see Routine * Resource */ enum RoutineField implements FieldSelector { @@ -189,7 +190,7 @@ public String getSelector() { /** * Fields of a BigQuery Job resource. * - * @see Job Resource + * @see Job Resource * */ enum JobField implements FieldSelector { @@ -373,6 +374,20 @@ public static TableOption fields(TableField... fields) { } } + /* Class for specifying IAM options. 
*/ + class IAMOption extends Option { + + private static final long serialVersionUID = 8607992885371024269L; + + private IAMOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + public static IAMOption requestedPolicyVersion(long version) { + return new IAMOption(BigQueryRpc.Option.REQUESTED_POLICY_VERSION, version); + } + } + /** Class for specifying model get, create and update options. */ class ModelOption extends Option { @@ -643,17 +658,20 @@ public int hashCode() { * *

Example of creating a dataset. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * Dataset dataset = null;
-   * DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
-   * try {
-   *   // the dataset was created
-   *   dataset = bigquery.create(datasetInfo);
-   * } catch (BigQueryException e) {
-   *   // the dataset was not created
+   * 
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   Dataset dataset = null;
+   *   DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
+   *   try {
+   *     // the dataset was created
+   *     dataset = bigquery.create(datasetInfo);
+   *   } catch (BigQueryException e) {
+   *     // the dataset was not created
+   *   }
    * }
-   * }
+ *
* * @throws BigQueryException upon failure */ @@ -664,19 +682,22 @@ public int hashCode() { * *

Example of creating a table. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * String fieldName = "string_field";
-   * TableId tableId = TableId.of(datasetName, tableName);
-   * // Table field definition
-   * Field field = Field.of(fieldName, LegacySQLTypeName.STRING);
-   * // Table schema definition
-   * Schema schema = Schema.of(field);
-   * TableDefinition tableDefinition = StandardTableDefinition.of(schema);
-   * TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
-   * Table table = bigquery.create(tableInfo);
-   * }
+ *
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   String fieldName = "string_field";
+   *   TableId tableId = TableId.of(datasetName, tableName);
+   *   // Table field definition
+   *   Field field = Field.of(fieldName, LegacySQLTypeName.STRING);
+   *   // Table schema definition
+   *   Schema schema = Schema.of(field);
+   *   TableDefinition tableDefinition = StandardTableDefinition.of(schema);
+   *   TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
+   *   Table table = bigquery.create(tableInfo);
+   * }
+   * 
* * @throws BigQueryException upon failure */ @@ -694,46 +715,46 @@ public int hashCode() { * *

Example of loading a newline-delimited-json file with textual fields from GCS to a table. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.json";
-   * TableId tableId = TableId.of(datasetName, tableName);
-   * // Table field definition
-   * Field[] fields =
-   *     new Field[] {
-   *       Field.of("name", LegacySQLTypeName.STRING),
-   *       Field.of("post_abbr", LegacySQLTypeName.STRING)
-   *     };
-   * // Table schema definition
-   * Schema schema = Schema.of(fields);
-   * LoadJobConfiguration configuration =
-   *     LoadJobConfiguration.builder(tableId, sourceUri)
-   *         .setFormatOptions(FormatOptions.json())
-   *         .setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
-   *         .setSchema(schema)
-   *         .build();
-   * // Load the table
-   * Job loadJob = bigquery.create(JobInfo.of(configuration));
-   * loadJob = loadJob.waitFor();
-   * // Check the table
-   * System.out.println("State: " + loadJob.getStatus().getState());
-   * return ((StandardTableDefinition) bigquery.getTable(tableId).getDefinition()).getNumRows();
-   * }
+ *
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.json";
+   *   TableId tableId = TableId.of(datasetName, tableName);
+   *   // Table field definition
+   *   Field[] fields = new Field[] { Field.of("name", LegacySQLTypeName.STRING),
+   *       Field.of("post_abbr", LegacySQLTypeName.STRING) };
+   *   // Table schema definition
+   *   Schema schema = Schema.of(fields);
+   *   LoadJobConfiguration configuration = LoadJobConfiguration.builder(tableId, sourceUri)
+   *       .setFormatOptions(FormatOptions.json()).setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
+   *       .setSchema(schema).build();
+   *   // Load the table
+   *   Job loadJob = bigquery.create(JobInfo.of(configuration));
+   *   loadJob = loadJob.waitFor();
+   *   // Check the table
+   *   System.out.println("State: " + loadJob.getStatus().getState());
+   *   return ((StandardTableDefinition) bigquery.getTable(tableId).getDefinition()).getNumRows();
+   * }
+   * 
* *

Example of creating a query job. * - *

{@code
-   * String query = "SELECT field FROM my_dataset_name.my_table_name";
-   * Job job = null;
-   * JobConfiguration jobConfiguration = QueryJobConfiguration.of(query);
-   * JobInfo jobInfo = JobInfo.of(jobConfiguration);
-   * try {
-   *   job = bigquery.create(jobInfo);
-   * } catch (BigQueryException e) {
-   *   // the job was not created
+   * 
+   * {
+   *   @code
+   *   String query = "SELECT field FROM my_dataset_name.my_table_name";
+   *   Job job = null;
+   *   JobConfiguration jobConfiguration = QueryJobConfiguration.of(query);
+   *   JobInfo jobInfo = JobInfo.of(jobConfiguration);
+   *   try {
+   *     job = bigquery.create(jobInfo);
+   *   } catch (BigQueryException e) {
+   *     // the job was not created
+   *   }
    * }
-   * }
+ *
* * @throws BigQueryException upon failure */ @@ -744,10 +765,13 @@ public int hashCode() { * *

Example of getting a dataset. * - *

{@code
-   * String datasetName = "my_dataset";
-   * Dataset dataset = bigquery.getDataset(datasetName);
-   * }
+ *
+   * {
+   *   @code
+   *   String datasetName = "my_dataset";
+   *   Dataset dataset = bigquery.getDataset(datasetName);
+   * }
+   * 
* * @throws BigQueryException upon failure */ @@ -758,12 +782,15 @@ public int hashCode() { * *

Example of getting a dataset. * - *

{@code
-   * String projectId = "my_project_id";
-   * String datasetName = "my_dataset_name";
-   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
-   * Dataset dataset = bigquery.getDataset(datasetId);
-   * }
+ *
+   * {
+   *   @code
+   *   String projectId = "my_project_id";
+   *   String datasetName = "my_dataset_name";
+   *   DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   *   Dataset dataset = bigquery.getDataset(datasetId);
+   * }
+   * 
* * @throws BigQueryException upon failure */ @@ -777,13 +804,16 @@ public int hashCode() { * *

Example of listing datasets, specifying the page size. * - *

{@code
-   * // List datasets in the default project
-   * Page datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
-   * for (Dataset dataset : datasets.iterateAll()) {
-   *   // do something with the dataset
+   * 
+   * {
+   *   @code
+   *   // List datasets in the default project
+   *   Page datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
+   *   for (Dataset dataset : datasets.iterateAll()) {
+   *     // do something with the dataset
+   *   }
    * }
-   * }
+ *
* * @throws BigQueryException upon failure */ @@ -797,14 +827,17 @@ public int hashCode() { * *

Example of listing datasets in a project, specifying the page size. * - *

{@code
-   * String projectId = "my_project_id";
-   * // List datasets in a specified project
-   * Page datasets = bigquery.listDatasets(projectId, DatasetListOption.pageSize(100));
-   * for (Dataset dataset : datasets.iterateAll()) {
-   *   // do something with the dataset
+   * 
+   * {
+   *   @code
+   *   String projectId = "my_project_id";
+   *   // List datasets in a specified project
+   *   Page datasets = bigquery.listDatasets(projectId, DatasetListOption.pageSize(100));
+   *   for (Dataset dataset : datasets.iterateAll()) {
+   *     // do something with the dataset
+   *   }
    * }
-   * }
+ *
* * @throws BigQueryException upon failure */ @@ -815,15 +848,18 @@ public int hashCode() { * *

Example of deleting a dataset from its id, even if non-empty. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
-   * if (deleted) {
-   *   // the dataset was deleted
-   * } else {
-   *   // the dataset was not found
+   * 
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
+   *   if (deleted) {
+   *     // the dataset was deleted
+   *   } else {
+   *     // the dataset was not found
+   *   }
    * }
-   * }
+ *
* * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -835,17 +871,20 @@ public int hashCode() { * *

Example of deleting a dataset, even if non-empty. * - *

{@code
-   * String projectId = "my_project_id";
-   * String datasetName = "my_dataset_name";
-   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
-   * boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
-   * if (deleted) {
-   *   // the dataset was deleted
-   * } else {
-   *   // the dataset was not found
+   * 
+   * {
+   *   @code
+   *   String projectId = "my_project_id";
+   *   String datasetName = "my_dataset_name";
+   *   DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   *   boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
+   *   if (deleted) {
+   *     // the dataset was deleted
+   *   } else {
+   *     // the dataset was not found
+   *   }
    * }
-   * }
+ *
* * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -869,18 +908,21 @@ public int hashCode() { * *

Example of deleting a table. * - *

{@code
-   * String projectId = "my_project_id";
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * TableId tableId = TableId.of(projectId, datasetName, tableName);
-   * boolean deleted = bigquery.delete(tableId);
-   * if (deleted) {
-   *   // the table was deleted
-   * } else {
-   *   // the table was not found
+   * 
+   * {
+   *   @code
+   *   String projectId = "my_project_id";
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   TableId tableId = TableId.of(projectId, datasetName, tableName);
+   *   boolean deleted = bigquery.delete(tableId);
+   *   if (deleted) {
+   *     // the table was deleted
+   *   } else {
+   *     // the table was not found
+   *   }
    * }
-   * }
+ *
* * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -892,18 +934,21 @@ public int hashCode() { * *

Example of deleting a model. * - *

{@code
-   * String projectId = "my_project_id";
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_model_name";
-   * ModelId modelId = ModelId.of(projectId, datasetName, modelName);
-   * boolean deleted = bigquery.delete(modelId);
-   * if (deleted) {
-   *   // the model was deleted
-   * } else {
-   *   // the model was not found
+   * 
+   * {
+   *   @code
+   *   String projectId = "my_project_id";
+   *   String datasetName = "my_dataset_name";
+   *   String modelName = "my_model_name";
+   *   ModelId modelId = ModelId.of(projectId, datasetName, modelName);
+   *   boolean deleted = bigquery.delete(modelId);
+   *   if (deleted) {
+   *     // the model was deleted
+   *   } else {
+   *     // the model was not found
+   *   }
    * }
-   * }
+ *
* * @return {@code true} if model was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -913,9 +958,11 @@ public int hashCode() { /** * Deletes the requested routine. * - *

Example of deleting a routine. + *

+ * Example of deleting a routine. * - *

{@code
+   * 
+   * {@code
    * String projectId = "my_project_id";
    * String datasetId = "my_dataset_id";
    * String routineId = "my_routine_id";
@@ -928,7 +975,9 @@ public int hashCode() {
    * }
    * 
* - * @return {@code true} if routine was deleted, {@code false} if it was not found + * @return {@code true} if routine was deleted, {@code false} if it was not + * found + * * @throws BigQueryException upon failure */ boolean delete(RoutineId routineId); @@ -937,20 +986,22 @@ public int hashCode() { * Updates dataset information. * *

Example of updating a dataset by changing its description. - * + * * - *

{@code
-   * // String datasetName = "my_dataset_name";
-   * // String tableName = "my_table_name";
-   * // String newDescription = "new_description";
+   * 
+   * {
+   *   @code
+   *   // String datasetName = "my_dataset_name";
+   *   // String tableName = "my_table_name";
+   *   // String newDescription = "new_description";
    *
-   * Table beforeTable = bigquery.getTable(datasetName, tableName);
-   * TableInfo tableInfo = beforeTable.toBuilder()
-   *     .setDescription(newDescription)
-   *     .build();
-   * Table afterTable = bigquery.update(tableInfo);
+   *   Table beforeTable = bigquery.getTable(datasetName, tableName);
+   *   TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build();
+   *   Table afterTable = bigquery.update(tableInfo);
    *
-   * }
+ * } + *
* * * @@ -963,31 +1014,33 @@ public int hashCode() { * *

Example of updating a table by changing its description. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * String newDescription = "new_description";
-   * Table beforeTable = bigquery.getTable(datasetName, tableName);
-   * TableInfo tableInfo = beforeTable.toBuilder()
-   *     .setDescription(newDescription)
-   *     .build();
-   * Table afterTable = bigquery.update(tableInfo);
-   * }
+ *
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   String newDescription = "new_description";
+   *   Table beforeTable = bigquery.getTable(datasetName, tableName);
+   *   TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build();
+   *   Table afterTable = bigquery.update(tableInfo);
+   * }
+   * 
* *

Example of updating a table by changing its expiration. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * Table beforeTable = bigquery.getTable(datasetName, tableName);
-   *
-   * // Set table to expire 5 days from now.
-   * long expirationMillis = DateTime.now().plusDays(5).getMillis();
-   * TableInfo tableInfo = beforeTable.toBuilder()
-   *         .setExpirationTime(expirationMillis)
-   *         .build();
-   * Table afterTable = bigquery.update(tableInfo);
-   * }
+ *
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   Table beforeTable = bigquery.getTable(datasetName, tableName);
+   *
+   *   // Set table to expire 5 days from now.
+   *   long expirationMillis = DateTime.now().plusDays(5).getMillis();
+   *   TableInfo tableInfo = beforeTable.toBuilder().setExpirationTime(expirationMillis).build();
+   *   Table afterTable = bigquery.update(tableInfo);
+   * }
+   * 
* * @throws BigQueryException upon failure */ @@ -998,31 +1051,33 @@ public int hashCode() { * *

Example of updating a model by changing its description. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String modelName = "my_model_name";
-   * String newDescription = "new_description";
-   * Model beforeModel = bigquery.getModel(datasetName, modelName);
-   * ModelInfo modelInfo = beforeModel.toBuilder()
-   *     .setDescription(newDescription)
-   *     .build();
-   * Model afterModel = bigquery.update(modelInfo);
-   * }
+ *
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String modelName = "my_model_name";
+   *   String newDescription = "new_description";
+   *   Model beforeModel = bigquery.getModel(datasetName, modelName);
+   *   ModelInfo modelInfo = beforeModel.toBuilder().setDescription(newDescription).build();
+   *   Model afterModel = bigquery.update(modelInfo);
+   * }
+   * 
* *

Example of updating a model by changing its expiration. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String modelName = "my_model_name";
-   * Model beforeModel = bigquery.getModel(datasetName, modelName);
-   *
-   * // Set model to expire 5 days from now.
-   * long expirationMillis = DateTime.now().plusDays(5).getMillis();
-   * ModelInfo modelInfo = beforeModel.toBuilder()
-   *         .setExpirationTime(expirationMillis)
-   *         .build();
-   * Model afterModel = bigquery.update(modelInfo);
-   * }
+ *
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String modelName = "my_model_name";
+   *   Model beforeModel = bigquery.getModel(datasetName, modelName);
+   *
+   *   // Set model to expire 5 days from now.
+   *   long expirationMillis = DateTime.now().plusDays(5).getMillis();
+   *   ModelInfo modelInfo = beforeModel.toBuilder().setExpirationTime(expirationMillis).build();
+   *   Model afterModel = bigquery.update(modelInfo);
+   * }
+   * 
* * @throws BigQueryException upon failure */ @@ -1040,11 +1095,14 @@ public int hashCode() { * *

Example of getting a table. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * Table table = bigquery.getTable(datasetName, tableName);
-   * }
+ *
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   Table table = bigquery.getTable(datasetName, tableName);
+   * }
+   * 
* * @throws BigQueryException upon failure */ @@ -1055,13 +1113,16 @@ public int hashCode() { * *

Example of getting a table. * - *

{@code
-   * String projectId = "my_project_id";
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * TableId tableId = TableId.of(projectId, datasetName, tableName);
-   * Table table = bigquery.getTable(tableId);
-   * }
+ *
+   * {
+   *   @code
+   *   String projectId = "my_project_id";
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   TableId tableId = TableId.of(projectId, datasetName, tableName);
+   *   Table table = bigquery.getTable(tableId);
+   * }
+   * 
* * @throws BigQueryException upon failure */ @@ -1079,13 +1140,16 @@ public int hashCode() { * *

Example of getting a model. * - *

{@code
-   * String projectId = "my_project_id";
-   * String datasetName = "my_dataset_name";
-   * String modelName = "my_model_name";
-   * ModelId modelId = ModelId.of(projectId, datasetName, tableName);
-   * Model model = bigquery.getModel(modelId);
-   * }
+ *
+   * {
+   *   @code
+   *   String projectId = "my_project_id";
+   *   String datasetName = "my_dataset_name";
+   *   String modelName = "my_model_name";
+   *   ModelId modelId = ModelId.of(projectId, datasetName, modelName);
+   *   Model model = bigquery.getModel(modelId);
+   * }
+   * 
* * @throws BigQueryException upon failure */ @@ -1112,42 +1176,54 @@ public int hashCode() { Page listRoutines(DatasetId datasetId, RoutineListOption... options); /** - * Lists the tables in the dataset. This method returns partial information on each table: ({@link - * Table#getTableId()}, {@link Table#getFriendlyName()}, {@link Table#getGeneratedId()} and type, - * which is part of {@link Table#getDefinition()}). To get complete information use either {@link - * #getTable(TableId, TableOption...)} or {@link #getTable(String, String, TableOption...)}. - * - *

Example of listing the tables in a dataset, specifying the page size. - * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * Page tables = bigquery.listTables(datasetName, TableListOption.pageSize(100));
-   * for (Table table : tables.iterateAll()) {
-   *   // do something with the table
+   * Lists the tables in the dataset. This method returns partial information on
+   * each table: ({@link Table#getTableId()}, {@link Table#getFriendlyName()},
+   * {@link Table#getGeneratedId()} and type, which is part of
+   * {@link Table#getDefinition()}). To get complete information use either
+   * {@link #getTable(TableId, TableOption...)} or
+   * {@link #getTable(String, String, TableOption...)}.
+   *
+   * 

+ * Example of listing the tables in a dataset, specifying the page size. + * + *

+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   Page
tables = bigquery.listTables(datasetName, TableListOption.pageSize(100)); + * for (Table table : tables.iterateAll()) { + * // do something with the table + * } * } - * } + * * * @throws BigQueryException upon failure */ Page
listTables(String datasetId, TableListOption... options); /** - * Lists the tables in the dataset. This method returns partial information on each table: ({@link - * Table#getTableId()}, {@link Table#getFriendlyName()}, {@link Table#getGeneratedId()} and type, - * which is part of {@link Table#getDefinition()}). To get complete information use either {@link - * #getTable(TableId, TableOption...)} or {@link #getTable(String, String, TableOption...)}. - * - *

Example of listing the tables in a dataset. - * - *

{@code
-   * String projectId = "my_project_id";
-   * String datasetName = "my_dataset_name";
-   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
-   * Page
tables = bigquery.listTables(datasetId, TableListOption.pageSize(100)); - * for (Table table : tables.iterateAll()) { - * // do something with the table + * Lists the tables in the dataset. This method returns partial information on + * each table: ({@link Table#getTableId()}, {@link Table#getFriendlyName()}, + * {@link Table#getGeneratedId()} and type, which is part of + * {@link Table#getDefinition()}). To get complete information use either + * {@link #getTable(TableId, TableOption...)} or + * {@link #getTable(String, String, TableOption...)}. + * + *

+ * Example of listing the tables in a dataset. + * + *

+   * {
+   *   @code
+   *   String projectId = "my_project_id";
+   *   String datasetName = "my_dataset_name";
+   *   DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   *   Page
tables = bigquery.listTables(datasetId, TableListOption.pageSize(100)); + * for (Table table : tables.iterateAll()) { + * // do something with the table + * } * } - * } + * * * @throws BigQueryException upon failure */ @@ -1170,32 +1246,33 @@ public int hashCode() { * *

Example of inserting rows into a table without running a load job. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * TableId tableId = TableId.of(datasetName, tableName);
-   * // Values of the row to insert
-   * Map rowContent = new HashMap<>();
-   * rowContent.put("booleanField", true);
-   * // Bytes are passed in base64
-   * rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64
-   * // Records are passed as a map
-   * Map recordsContent = new HashMap<>();
-   * recordsContent.put("stringField", "Hello, World!");
-   * rowContent.put("recordField", recordsContent);
-   * InsertAllResponse response =
-   *     bigquery.insertAll(
-   *         InsertAllRequest.newBuilder(tableId)
-   *             .addRow("rowId", rowContent)
-   *             // More rows can be added in the same RPC by invoking .addRow() on the builder
-   *             .build());
-   * if (response.hasErrors()) {
-   *   // If any of the insertions failed, this lets you inspect the errors
-   *   for (Entry> entry : response.getInsertErrors().entrySet()) {
-   *     // inspect row error
+   * 
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   TableId tableId = TableId.of(datasetName, tableName);
+   *   // Values of the row to insert
+   *   Map rowContent = new HashMap<>();
+   *   rowContent.put("booleanField", true);
+   *   // Bytes are passed in base64
+   *   rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64
+   *   // Records are passed as a map
+   *   Map recordsContent = new HashMap<>();
+   *   recordsContent.put("stringField", "Hello, World!");
+   *   rowContent.put("recordField", recordsContent);
+   *   InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow("rowId", rowContent)
+   *       // More rows can be added in the same RPC by invoking .addRow() on the
+   *       // builder
+   *       .build());
+   *   if (response.hasErrors()) {
+   *     // If any of the insertions failed, this lets you inspect the errors
+   *     for (Entry> entry : response.getInsertErrors().entrySet()) {
+   *       // inspect row error
+   *     }
    *   }
    * }
-   * }
+ *
* * @throws BigQueryException upon failure */ @@ -1206,17 +1283,20 @@ public int hashCode() { * *

Example of listing table rows, specifying the page size. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * // This example reads the result 100 rows per RPC call. If there's no need to limit the number,
-   * // simply omit the option.
-   * TableResult tableData =
-   *     bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
-   * for (FieldValueList row : tableData.iterateAll()) {
-   *   // do something with the row
+   * 
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   // This example reads the result 100 rows per RPC call. If there's no need
+   *   // to limit the number,
+   *   // simply omit the option.
+   *   TableResult tableData = bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
+   *   for (FieldValueList row : tableData.iterateAll()) {
+   *     // do something with the row
+   *   }
    * }
-   * }
+ *
* * @throws BigQueryException upon failure */ @@ -1227,18 +1307,21 @@ public int hashCode() { * *

Example of listing table rows, specifying the page size. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * TableId tableIdObject = TableId.of(datasetName, tableName);
-   * // This example reads the result 100 rows per RPC call. If there's no need to limit the number,
-   * // simply omit the option.
-   * TableResult tableData =
-   *     bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
-   * for (FieldValueList row : tableData.iterateAll()) {
-   *   // do something with the row
+   * 
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   TableId tableIdObject = TableId.of(datasetName, tableName);
+   *   // This example reads the result 100 rows per RPC call. If there's no need
+   *   // to limit the number,
+   *   // simply omit the option.
+   *   TableResult tableData = bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
+   *   for (FieldValueList row : tableData.iterateAll()) {
+   *     // do something with the row
+   *   }
    * }
-   * }
+ *
* * @throws BigQueryException upon failure */ @@ -1272,19 +1355,18 @@ TableResult listTableData( * *

Example of listing table rows with schema. * - *

{@code
-   * Schema schema =
-   *     Schema.of(
-   *         Field.of("word", LegacySQLTypeName.STRING),
-   *         Field.of("word_count", LegacySQLTypeName.STRING),
-   *         Field.of("corpus", LegacySQLTypeName.STRING),
-   *         Field.of("corpus_date", LegacySQLTypeName.STRING));
-   * TableResult tableData =
-   *     bigquery.listTableData(
-   *         TableId.of("bigquery-public-data", "samples", "shakespeare"), schema);
-   * FieldValueList row = tableData.getValues().iterator().next();
-   * System.out.println(row.get("word").getStringValue());
-   * }
+ *
+   * {
+   *   @code
+   *   Schema schema = Schema.of(Field.of("word", LegacySQLTypeName.STRING),
+   *       Field.of("word_count", LegacySQLTypeName.STRING), Field.of("corpus", LegacySQLTypeName.STRING),
+   *       Field.of("corpus_date", LegacySQLTypeName.STRING));
+   *   TableResult tableData = bigquery.listTableData(TableId.of("bigquery-public-data", "samples", "shakespeare"),
+   *       schema);
+   *   FieldValueList row = tableData.getValues().iterator().next();
+   *   System.out.println(row.get("word").getStringValue());
+   * }
+   * 
* * @throws BigQueryException upon failure */ @@ -1296,13 +1378,16 @@ TableResult listTableData( * *

Example of getting a job. * - *

{@code
-   * String jobName = "my_job_name";
-   * Job job = bigquery.getJob(jobName);
-   * if (job == null) {
-   *   // job was not found
+   * 
+   * {
+   *   @code
+   *   String jobName = "my_job_name";
+   *   Job job = bigquery.getJob(jobName);
+   *   if (job == null) {
+   *     // job was not found
+   *   }
    * }
-   * }
+ *
* * @throws BigQueryException upon failure */ @@ -1314,14 +1399,17 @@ TableResult listTableData( * *

Example of getting a job. * - *

{@code
-   * String jobName = "my_job_name";
-   * JobId jobIdObject = JobId.of(jobName);
-   * Job job = bigquery.getJob(jobIdObject);
-   * if (job == null) {
-   *   // job was not found
+   * 
+   * {
+   *   @code
+   *   String jobName = "my_job_name";
+   *   JobId jobIdObject = JobId.of(jobName);
+   *   Job job = bigquery.getJob(jobIdObject);
+   *   if (job == null) {
+   *     // job was not found
+   *   }
    * }
-   * }
+ *
* * @throws BigQueryException upon failure */ @@ -1332,12 +1420,15 @@ TableResult listTableData( * *

Example of listing jobs, specifying the page size. * - *

{@code
-   * Page jobs = bigquery.listJobs(JobListOption.pageSize(100));
-   * for (Job job : jobs.iterateAll()) {
-   *   // do something with the job
+   * 
+   * {
+   *   @code
+   *   Page jobs = bigquery.listJobs(JobListOption.pageSize(100));
+   *   for (Job job : jobs.iterateAll()) {
+   *     // do something with the job
+   *   }
    * }
-   * }
+ *
* * @throws BigQueryException upon failure */ @@ -1352,15 +1443,18 @@ TableResult listTableData( * *

Example of cancelling a job. * - *

{@code
-   * String jobName = "my_job_name";
-   * boolean success = bigquery.cancel(jobName);
-   * if (success) {
-   *   // job was cancelled
-   * } else {
-   *   // job was not found
+   * 
+   * {
+   *   @code
+   *   String jobName = "my_job_name";
+   *   boolean success = bigquery.cancel(jobName);
+   *   if (success) {
+   *     // job was cancelled
+   *   } else {
+   *     // job was not found
+   *   }
    * }
-   * }
+ *
* * @return {@code true} if cancel was requested successfully, {@code false} if the job was not * found @@ -1378,16 +1472,19 @@ TableResult listTableData( * *

Example of cancelling a job. * - *

{@code
-   * String jobName = "my_job_name";
-   * JobId jobId = JobId.of(jobName);
-   * boolean success = bigquery.cancel(jobId);
-   * if (success) {
-   *   // job was cancelled
-   * } else {
-   *   // job was not found
+   * 
+   * {
+   *   @code
+   *   String jobName = "my_job_name";
+   *   JobId jobId = JobId.of(jobName);
+   *   boolean success = bigquery.cancel(jobId);
+   *   if (success) {
+   *     // job was cancelled
+   *   } else {
+   *     // job was not found
+   *   }
    * }
-   * }
+ *
* * @return {@code true} if cancel was requested successfully, {@code false} if the job was not * found @@ -1406,19 +1503,22 @@ TableResult listTableData( * *

Example of running a query. * - *

{@code
-   * // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
-   * String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
-   * QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
-   *
-   * // Print the results.
-   * for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
-   *   for (FieldValue val : row) {
-   *     System.out.printf("%s,", val.toString());
+   * 
+   * {
+   *   @code
+   *   // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+   *   String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
+   *   QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
+   *
+   *   // Print the results.
+   *   for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
+   *     for (FieldValue val : row) {
+   *       System.out.printf("%s,", val.toString());
+   *     }
+   *     System.out.printf("\n");
    *   }
-   *   System.out.printf("\n");
    * }
-   * }
+ *
* * @throws BigQueryException upon failure * @throws InterruptedException if the current thread gets interrupted while waiting for the query @@ -1463,50 +1563,56 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... * *

Example of creating a channel with which to write to a table. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * String csvData = "StringValue1\nStringValue2\n";
-   * TableId tableId = TableId.of(datasetName, tableName);
-   * WriteChannelConfiguration writeChannelConfiguration =
-   *     WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build();
-   * TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
-   * // Write data to writer
-   * try {
-   *   writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
-   * } finally {
-   *   writer.close();
+   * 
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   String csvData = "StringValue1\nStringValue2\n";
+   *   TableId tableId = TableId.of(datasetName, tableName);
+   *   WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
+   *       .setFormatOptions(FormatOptions.csv()).build();
+   *   TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
+   *   // Write data to writer
+   *   try {
+   *     writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
+   *   } finally {
+   *     writer.close();
+   *   }
+   *   // Get load job
+   *   Job job = writer.getJob();
+   *   job = job.waitFor();
+   *   LoadStatistics stats = job.getStatistics();
+   *   return stats.getOutputRows();
    * }
-   * // Get load job
-   * Job job = writer.getJob();
-   * job = job.waitFor();
-   * LoadStatistics stats = job.getStatistics();
-   * return stats.getOutputRows();
-   * }
+ *
* *

Example of writing a local file to a table. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv");
-   * String location = "us";
-   * TableId tableId = TableId.of(datasetName, tableName);
-   * WriteChannelConfiguration writeChannelConfiguration =
-   *     WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build();
-   * // The location must be specified; other fields can be auto-detected.
-   * JobId jobId = JobId.newBuilder().setLocation(location).build();
-   * TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
-   * // Write data to writer
-   * try (OutputStream stream = Channels.newOutputStream(writer)) {
-   *   Files.copy(csvPath, stream);
+   * 
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv");
+   *   String location = "us";
+   *   TableId tableId = TableId.of(datasetName, tableName);
+   *   WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
+   *       .setFormatOptions(FormatOptions.csv()).build();
+   *   // The location must be specified; other fields can be auto-detected.
+   *   JobId jobId = JobId.newBuilder().setLocation(location).build();
+   *   TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
+   *   // Write data to writer
+   *   try (OutputStream stream = Channels.newOutputStream(writer)) {
+   *     Files.copy(csvPath, stream);
+   *   }
+   *   // Get load job
+   *   Job job = writer.getJob();
+   *   job = job.waitFor();
+   *   LoadStatistics stats = job.getStatistics();
+   *   return stats.getOutputRows();
    * }
-   * // Get load job
-   * Job job = writer.getJob();
-   * job = job.waitFor();
-   * LoadStatistics stats = job.getStatistics();
-   * return stats.getOutputRows();
-   * }
+ *
* * @throws BigQueryException upon failure */ @@ -1519,29 +1625,44 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... * *

Example of creating a channel with which to write to a table. * - *

{@code
-   * String datasetName = "my_dataset_name";
-   * String tableName = "my_table_name";
-   * String csvData = "StringValue1\nStringValue2\n";
-   * String location = "us";
-   * TableId tableId = TableId.of(datasetName, tableName);
-   * WriteChannelConfiguration writeChannelConfiguration =
-   *     WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build();
-   * // The location must be specified; other fields can be auto-detected.
-   * JobId jobId = JobId.newBuilder().setLocation(location).build();
-   * TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
-   * // Write data to writer
-   * try {
-   *   writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
-   * } finally {
-   *   writer.close();
+   * 
+   * {
+   *   @code
+   *   String datasetName = "my_dataset_name";
+   *   String tableName = "my_table_name";
+   *   String csvData = "StringValue1\nStringValue2\n";
+   *   String location = "us";
+   *   TableId tableId = TableId.of(datasetName, tableName);
+   *   WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
+   *       .setFormatOptions(FormatOptions.csv()).build();
+   *   // The location must be specified; other fields can be auto-detected.
+   *   JobId jobId = JobId.newBuilder().setLocation(location).build();
+   *   TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
+   *   // Write data to writer
+   *   try {
+   *     writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
+   *   } finally {
+   *     writer.close();
+   *   }
+   *   // Get load job
+   *   Job job = writer.getJob();
+   *   job = job.waitFor();
+   *   LoadStatistics stats = job.getStatistics();
+   *   return stats.getOutputRows();
    * }
-   * // Get load job
-   * Job job = writer.getJob();
-   * job = job.waitFor();
-   * LoadStatistics stats = job.getStatistics();
-   * return stats.getOutputRows();
-   * }
+ *
*/ TableDataWriteChannel writer(JobId jobId, WriteChannelConfiguration writeChannelConfiguration); + + /** Gets the IAM policy for a specified table. */ + Policy getIamPolicy(TableId tableId, IAMOption... options); + + /** Sets the IAM policy for a specified table. */ + Policy setIamPolicy(TableId tableId, Policy policy, IAMOption... options); + + /** + * Tests whether the caller holds specific permissions on a BigQuery table. The returned list + * represents the subset of granted permissions. + */ + List testIamPermissions(TableId table, List permissions, IAMOption... options); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java index a3526077d..070a4c965 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java @@ -17,6 +17,8 @@ package com.google.cloud.bigquery; import static com.google.cloud.RetryHelper.runWithRetries; +import static com.google.cloud.bigquery.PolicyHelper.convertFromApiPolicy; +import static com.google.cloud.bigquery.PolicyHelper.convertToApiPolicy; import static com.google.common.base.Preconditions.checkArgument; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; @@ -33,6 +35,7 @@ import com.google.cloud.BaseService; import com.google.cloud.PageImpl; import com.google.cloud.PageImpl.NextPageFetcher; +import com.google.cloud.Policy; import com.google.cloud.RetryHelper; import com.google.cloud.RetryHelper.RetryHelperException; import com.google.cloud.Tuple; @@ -338,9 +341,11 @@ public com.google.api.services.bigquery.model.Job call() { throw createException; } - // If create RPC fails, it's still possible that the job has been successfully created, + // If create RPC fails, it's still possible that the job has been successfully + // created, // and get might work. 
- // We can only do this if we randomly generated the ID. Otherwise we might mistakenly + // We can only do this if we randomly generated the ID. Otherwise we might + // mistakenly // fetch a job created by someone else. Job job; try { @@ -653,7 +658,8 @@ public Table getTable(final String datasetId, final String tableId, TableOption. @Override public Table getTable(TableId tableId, TableOption... options) { - // More context about why this: https://github.com/googleapis/google-cloud-java/issues/3808 + // More context about why this: + // https://github.com/googleapis/google-cloud-java/issues/3808 final TableId completeTableId = tableId.setProjectId( Strings.isNullOrEmpty(tableId.getProject()) @@ -941,7 +947,8 @@ public InsertAllResponse insertAll(InsertAllRequest request) { requestPb.setIgnoreUnknownValues(request.ignoreUnknownValues()); requestPb.setSkipInvalidRows(request.skipInvalidRows()); requestPb.setTemplateSuffix(request.getTemplateSuffix()); - // Using an array of size 1 here to have a mutable boolean variable, which can be modified in + // Using an array of size 1 here to have a mutable boolean variable, which can + // be modified in // an anonymous inner class. final boolean[] allInsertIdsSet = {true}; List rowsPb = @@ -1249,6 +1256,86 @@ public TableDataWriteChannel writer( writeChannelConfiguration.setProjectId(getOptions().getProjectId())); } + @Override + public Policy getIamPolicy(TableId tableId, IAMOption... options) { + final TableId completeTableId = + tableId.setProjectId( + Strings.isNullOrEmpty(tableId.getProject()) + ? 
getOptions().getProjectId() + : tableId.getProject()); + + try { + final Map optionsMap = optionMap(options); + return convertFromApiPolicy( + runWithRetries( + new Callable() { + @Override + public com.google.api.services.bigquery.model.Policy call() { + return bigQueryRpc.getIamPolicy(completeTableId.getIAMResourceName(), optionsMap); + } + }, + getOptions().getRetrySettings(), + EXCEPTION_HANDLER, + getOptions().getClock())); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Policy setIamPolicy(TableId tableId, final Policy policy, IAMOption... options) { + final TableId completeTableId = + tableId.setProjectId( + Strings.isNullOrEmpty(tableId.getProject()) + ? getOptions().getProjectId() + : tableId.getProject()); + try { + final Map optionsMap = optionMap(options); + return convertFromApiPolicy( + runWithRetries( + new Callable() { + @Override + public com.google.api.services.bigquery.model.Policy call() { + return bigQueryRpc.setIamPolicy( + completeTableId.getIAMResourceName(), convertToApiPolicy(policy), optionsMap); + } + }, + getOptions().getRetrySettings(), + EXCEPTION_HANDLER, + getOptions().getClock())); + } catch (RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public List testIamPermissions( + TableId tableId, final List permissions, IAMOption... options) { + final TableId completeTableId = + tableId.setProjectId( + Strings.isNullOrEmpty(tableId.getProject()) + ? 
getOptions().getProjectId() + : tableId.getProject()); + try { + final Map optionsMap = optionMap(options); + com.google.api.services.bigquery.model.TestIamPermissionsResponse response = + runWithRetries( + new Callable() { + @Override + public com.google.api.services.bigquery.model.TestIamPermissionsResponse call() { + return bigQueryRpc.testIamPermissions( + completeTableId.getIAMResourceName(), permissions, optionsMap); + } + }, + getOptions().getRetrySettings(), + EXCEPTION_HANDLER, + getOptions().getClock()); + return ImmutableList.copyOf(response.getPermissions()); + } catch (RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + @VisibleForTesting static Map optionMap(Option... options) { Map optionMap = Maps.newEnumMap(BigQueryRpc.Option.class); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java index ae4db388f..5687d7898 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java @@ -94,6 +94,8 @@ private BigQueryOptions(Builder builder) { private static class BigQueryDefaults implements ServiceDefaults { + private static final long serialVersionUID = -4551722608999107711L; + @Override public BigQueryFactory getDefaultServiceFactory() { return DefaultBigQueryFactory.INSTANCE; diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/PolicyHelper.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/PolicyHelper.java new file mode 100644 index 000000000..c830bb3e3 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/PolicyHelper.java @@ -0,0 +1,80 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery; + +import com.google.api.services.bigquery.model.Expr; +import com.google.cloud.Binding; +import com.google.cloud.Condition; +import com.google.cloud.Policy; +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.List; + +class PolicyHelper { + + static Policy convertFromApiPolicy(com.google.api.services.bigquery.model.Policy apiPolicy) { + Policy.Builder policyBuilder = Policy.newBuilder(); + List bindings = apiPolicy.getBindings(); + + if (null != bindings) { + ImmutableList.Builder coreBindings = ImmutableList.builder(); + Binding.Builder bindingBuilder = Binding.newBuilder(); + for (com.google.api.services.bigquery.model.Binding binding : bindings) { + bindingBuilder.setRole(binding.getRole()); + bindingBuilder.setMembers(binding.getMembers()); + if (binding.getCondition() != null) { + Condition.Builder conditionBuilder = Condition.newBuilder(); + conditionBuilder.setTitle(binding.getCondition().getTitle()); + conditionBuilder.setDescription(binding.getCondition().getDescription()); + conditionBuilder.setExpression(binding.getCondition().getExpression()); + bindingBuilder.setCondition(conditionBuilder.build()); + } + coreBindings.add(bindingBuilder.build()); + } + policyBuilder.setBindings(coreBindings.build()); + } + if (null != apiPolicy.getVersion()) { + policyBuilder.setVersion(apiPolicy.getVersion()); + } + return policyBuilder.setEtag(apiPolicy.getEtag()).build(); + } + + static com.google.api.services.bigquery.model.Policy convertToApiPolicy(Policy 
policy) { + List bindings = null; + if (policy.getBindingsList().size() != 0) { + bindings = new ArrayList<>(policy.getBindingsList().size()); + + for (Binding binding : policy.getBindingsList()) { + com.google.api.services.bigquery.model.Binding apiBinding = + new com.google.api.services.bigquery.model.Binding(); + apiBinding.setRole(binding.getRole()); + apiBinding.setMembers(new ArrayList<>(binding.getMembers())); + if (binding.getCondition() != null) { + Expr expr = new Expr(); + expr.setTitle(binding.getCondition().getTitle()); + expr.setDescription(binding.getCondition().getDescription()); + expr.setExpression(binding.getCondition().getExpression()); + apiBinding.setCondition(expr); + } + bindings.add(apiBinding); + } + } + return new com.google.api.services.bigquery.model.Policy() + .setBindings(bindings) + .setEtag(policy.getEtag()) + .setVersion(policy.getVersion()); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java index f475e5070..b74055d4f 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java @@ -63,6 +63,12 @@ public String getTable() { return table; } + /** Returns the IAM resource name for the table. 
* */ + public String getIAMResourceName() { + return String.format( + "projects/%s/datasets/%s/tables/%s", getProject(), getDataset(), getTable()); + } + private TableId(String project, String dataset, String table) { checkArgument(!isNullOrEmpty(dataset), "Provided dataset is null or empty"); checkArgument(!isNullOrEmpty(table), "Provided table is null or empty"); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java index 7ba0cd764..c0b9bb4be 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java @@ -21,14 +21,17 @@ import com.google.api.services.bigquery.model.GetQueryResultsResponse; import com.google.api.services.bigquery.model.Job; import com.google.api.services.bigquery.model.Model; +import com.google.api.services.bigquery.model.Policy; import com.google.api.services.bigquery.model.Routine; import com.google.api.services.bigquery.model.Table; import com.google.api.services.bigquery.model.TableDataInsertAllRequest; import com.google.api.services.bigquery.model.TableDataInsertAllResponse; import com.google.api.services.bigquery.model.TableDataList; +import com.google.api.services.bigquery.model.TestIamPermissionsResponse; import com.google.cloud.ServiceRpc; import com.google.cloud.Tuple; import com.google.cloud.bigquery.BigQueryException; +import java.util.List; import java.util.Map; @InternalExtensionOnly @@ -48,7 +51,8 @@ enum Option { PARENT_JOB_ID("parentJobId"), START_INDEX("startIndex"), STATE_FILTER("stateFilter"), - TIMEOUT("timeoutMs"); + TIMEOUT("timeoutMs"), + REQUESTED_POLICY_VERSION("requestedPolicyVersion"); private final String value; @@ -300,4 +304,27 @@ Job write( long destOffset, int length, boolean last); + + /** + * Returns the IAM Policy for the specified resource, using 
Policy V1. + * + * @throws BigQueryException upon failure + */ + Policy getIamPolicy(String resourceId, Map options); + + /** + * Updates the IAM policy for the specified resource. + * + * @throws BigQueryException upon failure + */ + Policy setIamPolicy(String resourceId, Policy policy, Map options); + + /** + * Tests whether the caller holds the provided permissions for the specified resource. Returns the + * subset of permissions the caller actually holds. + * + * @throws BigQueryException upon failure + */ + TestIamPermissionsResponse testIamPermissions( + String resourceId, List permissions, Map options); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java index 391209fcb..9aaef6db5 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java @@ -37,6 +37,8 @@ import com.google.api.services.bigquery.model.Dataset; import com.google.api.services.bigquery.model.DatasetList; import com.google.api.services.bigquery.model.DatasetReference; +import com.google.api.services.bigquery.model.GetIamPolicyRequest; +import com.google.api.services.bigquery.model.GetPolicyOptions; import com.google.api.services.bigquery.model.GetQueryResultsResponse; import com.google.api.services.bigquery.model.Job; import com.google.api.services.bigquery.model.JobList; @@ -45,14 +47,18 @@ import com.google.api.services.bigquery.model.ListRoutinesResponse; import com.google.api.services.bigquery.model.Model; import com.google.api.services.bigquery.model.ModelReference; +import com.google.api.services.bigquery.model.Policy; import com.google.api.services.bigquery.model.Routine; import com.google.api.services.bigquery.model.RoutineReference; +import com.google.api.services.bigquery.model.SetIamPolicyRequest; import 
com.google.api.services.bigquery.model.Table; import com.google.api.services.bigquery.model.TableDataInsertAllRequest; import com.google.api.services.bigquery.model.TableDataInsertAllResponse; import com.google.api.services.bigquery.model.TableDataList; import com.google.api.services.bigquery.model.TableList; import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TestIamPermissionsRequest; +import com.google.api.services.bigquery.model.TestIamPermissionsResponse; import com.google.cloud.Tuple; import com.google.cloud.bigquery.BigQueryException; import com.google.cloud.bigquery.BigQueryOptions; @@ -71,7 +77,8 @@ public class HttpBigQueryRpc implements BigQueryRpc { public static final String DEFAULT_PROJECTION = "full"; private static final String BASE_RESUMABLE_URI = "https://www.googleapis.com/upload/bigquery/v2/projects/"; - // see: https://cloud.google.com/bigquery/loading-data-post-request#resume-upload + // see: + // https://cloud.google.com/bigquery/loading-data-post-request#resume-upload private static final int HTTP_RESUME_INCOMPLETE = 308; private final BigQueryOptions options; private final Bigquery bigquery; @@ -657,4 +664,43 @@ public Job write( throw translate(ex); } } + + @Override + public Policy getIamPolicy(String resourceId, Map options) { + try { + GetIamPolicyRequest policyRequest = new GetIamPolicyRequest(); + if (null != Option.REQUESTED_POLICY_VERSION.getLong(options)) { + policyRequest = + policyRequest.setOptions( + new GetPolicyOptions() + .setRequestedPolicyVersion( + Option.REQUESTED_POLICY_VERSION.getLong(options).intValue())); + } + return bigquery.tables().getIamPolicy(resourceId, policyRequest).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Policy setIamPolicy(String resourceId, Policy policy, Map options) { + try { + SetIamPolicyRequest policyRequest = new SetIamPolicyRequest().setPolicy(policy); + return 
bigquery.tables().setIamPolicy(resourceId, policyRequest).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public TestIamPermissionsResponse testIamPermissions( + String resourceId, List permissions, Map options) { + try { + TestIamPermissionsRequest permissionsRequest = + new TestIamPermissionsRequest().setPermissions(permissions); + return bigquery.tables().testIamPermissions(resourceId, permissionsRequest).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java index 6435ff91a..b04aa4577 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java @@ -40,6 +40,7 @@ import com.google.api.services.bigquery.model.TableDataInsertAllResponse; import com.google.api.services.bigquery.model.TableDataList; import com.google.api.services.bigquery.model.TableRow; +import com.google.cloud.Policy; import com.google.cloud.ServiceOptions; import com.google.cloud.Tuple; import com.google.cloud.bigquery.BigQuery.QueryResultsOption; @@ -486,6 +487,15 @@ public class BigQueryImplTest { .setMaxBadRecords(10) .build(); + private static final Policy SAMPLE_IAM_POLICY = + Policy.newBuilder() + .addIdentity( + com.google.cloud.Role.of("roles/bigquery.dataViewer"), + com.google.cloud.Identity.allUsers()) + .setEtag(ETAG) + .setVersion(1) + .build(); + private BigQueryOptions options; private BigQueryRpcFactory rpcFactoryMock; private BigQueryRpc bigqueryRpcMock; @@ -2227,4 +2237,48 @@ public void testWriteChannel() throws IOException { verify(bigqueryRpcMock) .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); } + + @Test + public void testGetIamPolicy() { + final String resourceId = + 
String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); + final com.google.api.services.bigquery.model.Policy apiPolicy = + PolicyHelper.convertToApiPolicy(SAMPLE_IAM_POLICY); + when(bigqueryRpcMock.getIamPolicy(resourceId, EMPTY_RPC_OPTIONS)).thenReturn(apiPolicy); + bigquery = options.getService(); + Policy policy = bigquery.getIamPolicy(TABLE_ID); + assertEquals(policy, SAMPLE_IAM_POLICY); + verify(bigqueryRpcMock).getIamPolicy(resourceId, EMPTY_RPC_OPTIONS); + } + + @Test + public void testSetIamPolicy() { + final String resourceId = + String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); + final com.google.api.services.bigquery.model.Policy apiPolicy = + PolicyHelper.convertToApiPolicy(SAMPLE_IAM_POLICY); + when(bigqueryRpcMock.setIamPolicy(resourceId, apiPolicy, EMPTY_RPC_OPTIONS)) + .thenReturn(apiPolicy); + bigquery = options.getService(); + Policy returnedPolicy = bigquery.setIamPolicy(TABLE_ID, SAMPLE_IAM_POLICY); + assertEquals(returnedPolicy, SAMPLE_IAM_POLICY); + verify(bigqueryRpcMock).setIamPolicy(resourceId, apiPolicy, EMPTY_RPC_OPTIONS); + } + + @Test + public void testTestIamPermissions() { + final String resourceId = + String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); + final List checkedPermissions = ImmutableList.of("foo", "bar", "baz"); + final List grantedPermissions = ImmutableList.of("foo", "bar"); + final com.google.api.services.bigquery.model.TestIamPermissionsResponse response = + new com.google.api.services.bigquery.model.TestIamPermissionsResponse() + .setPermissions(grantedPermissions); + when(bigqueryRpcMock.testIamPermissions(resourceId, checkedPermissions, EMPTY_RPC_OPTIONS)) + .thenReturn(response); + bigquery = options.getService(); + List perms = bigquery.testIamPermissions(TABLE_ID, checkedPermissions); + assertEquals(perms, grantedPermissions); + verify(bigqueryRpcMock).testIamPermissions(resourceId, checkedPermissions, EMPTY_RPC_OPTIONS); + } } diff --git 
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.bigquery;

import static org.junit.Assert.assertEquals;

import com.google.api.services.bigquery.model.Binding;
import com.google.cloud.Identity;
import com.google.cloud.Policy;
import com.google.cloud.Role;
import com.google.common.collect.ImmutableList;
import org.junit.Test;

/**
 * Unit tests for {@code PolicyHelper}, which converts between the client-side
 * {@link com.google.cloud.Policy} representation and the BigQuery API model
 * {@link com.google.api.services.bigquery.model.Policy}.
 *
 * <p>Each test checks both conversion directions plus a round trip in each
 * direction, so the two policy constants below are intended to be exact
 * translations of one another.
 */
public class PolicyHelperTest {

  public static final String ETAG = "etag";
  public static final String ROLE1 = "roles/bigquery.admin";
  public static final String ROLE2 = "roles/bigquery.dataEditor";
  public static final String USER1 = "user1@gmail.com";
  public static final String USER2 = "user2@gmail.com";

  // API-model policy: bindings use raw member strings ("user:...", "allUsers").
  static final com.google.api.services.bigquery.model.Policy API_POLICY =
      new com.google.api.services.bigquery.model.Policy()
          .setBindings(
              ImmutableList.of(
                  new Binding()
                      .setRole(ROLE1)
                      .setMembers(ImmutableList.of(String.format("user:%s", USER1))),
                  new Binding()
                      .setRole(ROLE2)
                      .setMembers(ImmutableList.of(String.format("user:%s", USER2), "allUsers"))))
          .setEtag(ETAG)
          .setVersion(1);

  // Edge case: a policy with no bindings at all (only etag/version set).
  static final com.google.api.services.bigquery.model.Policy API_POLICY_NO_BINDINGS =
      new com.google.api.services.bigquery.model.Policy().setEtag(ETAG).setVersion(1);

  // Client-side policy: same bindings expressed via typed Role/Identity values.
  static final Policy IAM_POLICY =
      Policy.newBuilder()
          .addIdentity(Role.of(ROLE1), Identity.user(USER1))
          .addIdentity(Role.of(ROLE2), Identity.user(USER2), Identity.allUsers())
          .setEtag(ETAG)
          .setVersion(1)
          .build();

  static final Policy IAM_POLICY_NO_BINDINGS =
      Policy.newBuilder().setEtag(ETAG).setVersion(1).build();

  /** Conversions (and round trips) must be lossless for a policy with bindings. */
  @Test
  public void testConversionWithBindings() {
    assertEquals(IAM_POLICY, PolicyHelper.convertFromApiPolicy(API_POLICY));
    assertEquals(API_POLICY, PolicyHelper.convertToApiPolicy(IAM_POLICY));
    assertEquals(
        IAM_POLICY, PolicyHelper.convertFromApiPolicy(PolicyHelper.convertToApiPolicy(IAM_POLICY)));
    assertEquals(
        API_POLICY, PolicyHelper.convertToApiPolicy(PolicyHelper.convertFromApiPolicy(API_POLICY)));
  }

  /** Conversions (and round trips) must also be lossless when there are no bindings. */
  @Test
  public void testConversionNoBindings() {
    assertEquals(IAM_POLICY_NO_BINDINGS, PolicyHelper.convertFromApiPolicy(API_POLICY_NO_BINDINGS));
    assertEquals(API_POLICY_NO_BINDINGS, PolicyHelper.convertToApiPolicy(IAM_POLICY_NO_BINDINGS));
    assertEquals(
        IAM_POLICY_NO_BINDINGS,
        PolicyHelper.convertFromApiPolicy(PolicyHelper.convertToApiPolicy(IAM_POLICY_NO_BINDINGS)));
    assertEquals(
        API_POLICY_NO_BINDINGS,
        PolicyHelper.convertToApiPolicy(PolicyHelper.convertFromApiPolicy(API_POLICY_NO_BINDINGS)));
  }
}
= TableId.of("project", "dataset", "table"); + private static final String TABLE_IAM_RESOURCE_NAME = + "projects/project/datasets/dataset/tables/table"; @Test public void testOf() { @@ -33,6 +35,7 @@ public void testOf() { assertEquals("project", TABLE_COMPLETE.getProject()); assertEquals("dataset", TABLE_COMPLETE.getDataset()); assertEquals("table", TABLE_COMPLETE.getTable()); + assertEquals(TABLE_IAM_RESOURCE_NAME, TABLE_COMPLETE.getIAMResourceName()); } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java index cd69d8afd..fcbb84fda 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java @@ -22,6 +22,7 @@ import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; @@ -31,7 +32,10 @@ import com.google.auth.oauth2.GoogleCredentials; import com.google.auth.oauth2.ServiceAccountCredentials; import com.google.cloud.Date; +import com.google.cloud.Identity; +import com.google.cloud.Policy; import com.google.cloud.RetryOption; +import com.google.cloud.Role; import com.google.cloud.ServiceOptions; import com.google.cloud.bigquery.Acl; import com.google.cloud.bigquery.BigQuery; @@ -741,6 +745,36 @@ public void testCreateMaterializedViewTable() { assertTrue(remoteTable.delete()); } + @Test + public void testTableIAM() { + String tableName = "test_iam_table"; + TableId tableId = TableId.of(DATASET, tableName); + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build(); + + 
bigquery.create(TableInfo.of(tableId, tableDefinition)); + + // Check we have some of the expected default permissions as we created the table. + List checkedPerms = + ImmutableList.of( + "bigquery.tables.get", "bigquery.tables.getData", "bigquery.tables.update"); + List grantedPerms = bigquery.testIamPermissions(tableId, checkedPerms); + assertEquals(checkedPerms, grantedPerms); + + // get and modify policy + Policy policy = bigquery.getIamPolicy(tableId); + Policy editedPolicy = + policy + .toBuilder() + .addIdentity(Role.of("roles/bigquery.dataViewer"), Identity.allUsers()) + .build(); + Policy updatedPolicy = bigquery.setIamPolicy(tableId, editedPolicy); + // We should have a different etag, so the policies aren't strictly equal + assertNotEquals(updatedPolicy, editedPolicy); + // However, the bindings should be. + assertEquals(updatedPolicy.getBindingsList(), editedPolicy.getBindingsList()); + } + @Test public void testListTables() { String tableName = "test_list_tables";