diff --git a/proto-google-common-protos/src/main/proto/google/cloud/audit/bigquery_audit_metadata.proto b/proto-google-common-protos/src/main/proto/google/cloud/audit/bigquery_audit_metadata.proto new file mode 100644 index 00000000..f3a87ac5 --- /dev/null +++ b/proto-google-common-protos/src/main/proto/google/cloud/audit/bigquery_audit_metadata.proto @@ -0,0 +1,1184 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.audit; + +import "google/iam/v1/policy.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/status.proto"; + +option csharp_namespace = "Google.Cloud.Audit"; +option go_package = "google.golang.org/genproto/googleapis/cloud/audit;audit"; +option java_multiple_files = true; +option java_outer_classname = "BigQueryAuditMetadataProto"; +option java_package = "com.google.cloud.audit"; +option objc_class_prefix = "GCA"; +option php_namespace = "Google\\Cloud\\Audit"; + +// Audit log format for BigQuery cloud audit logs metadata. +// +message BigQueryAuditMetadata { + // Job insertion event. + message JobInsertion { + // Describes how the job was inserted. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Job was inserted using the jobs.insert API. + JOB_INSERT_REQUEST = 1; + + // Job was inserted using the jobs.query RPC. + QUERY_REQUEST = 2; + } + + // Job metadata. + Job job = 1; + + // Describes how the job was inserted. + Reason reason = 2; + } + + // Job state change event. + message JobChange { + // Job state before the job state change. + JobState before = 1; + + // Job state after the job state change. + JobState after = 2; + + // Job metadata. + Job job = 3; + } + + // Dataset creation event. + message DatasetCreation { + // Describes how the dataset was created. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Dataset was created using the datasets.create API. + CREATE = 1; + + // Dataset was created using a query job, e.g., CREATE SCHEMA statement. + QUERY = 2; + } + + // Dataset metadata. + Dataset dataset = 1; + + // Describes how the dataset was created. + Reason reason = 2; + + // The URI of the job that created the dataset. + // Present if the reason is QUERY. + // + // Format: `projects//jobs/`. + string job_name = 3; + } + + // Dataset change event. + message DatasetChange { + // Describes how the dataset was changed. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Dataset was changed using the datasets.update or datasets.patch API. + UPDATE = 1; + + // Dataset was changed using the SetIamPolicy API. + SET_IAM_POLICY = 2; + + // Dataset was changed using a query job, e.g., ALTER SCHEMA statement. + QUERY = 3; + } + + // Dataset metadata after the change. + Dataset dataset = 1; + + // Describes how the dataset was changed. + Reason reason = 2; + + // The URI of the job that updated the dataset. + // Present if the reason is QUERY. + // + // Format: `projects//jobs/`. 
+ string job_name = 3; + } + + // Dataset deletion event. + message DatasetDeletion { + // Describes how the dataset was deleted. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Dataset was deleted using the datasets.delete API. + DELETE = 1; + + // Dataset was deleted using a query job, e.g., DROP SCHEMA statement. + QUERY = 2; + } + + // Describes how the dataset was deleted. + Reason reason = 1; + + // The URI of the job that deleted the dataset. + // Present if the reason is QUERY. + // + // Format: `projects//jobs/`. + string job_name = 2; + } + + // Table creation event. + message TableCreation { + // Describes how the table was created. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Table was created as a destination table during a query, load or copy + // job. + JOB = 1; + + // Table was created using a DDL query. + QUERY = 2; + + // Table was created using the tables.create API. + TABLE_INSERT_REQUEST = 3; + } + + // Table metadata. + Table table = 1; + + // Describes how the table was created. + Reason reason = 3; + + // The URI of the job that created a table. + // Present if the reason is JOB or QUERY. + // + // Format: `projects//jobs/`. + string job_name = 4; + } + + // Model creation event. + message ModelCreation { + // Describes how the model was created. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Model was created using a DDL query. + QUERY = 2; + } + + // Model metadata. + Model model = 1; + + // Describes how the model was created. + Reason reason = 3; + + // The URI of the job that created the model. + // + // Format: `projects//jobs/`. + string job_name = 4; + } + + // Routine creation event. + message RoutineCreation { + // Describes how the routine was created. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Routine was created using a DDL query. + QUERY = 1; + + // Routine was created using the routines.create API. + ROUTINE_INSERT_REQUEST = 2; + } + + // Created routine. + Routine routine = 1; + + // Describes how the routine was created. + Reason reason = 3; + + // The URI of the job that created the routine. + // + // Format: `projects//jobs/`. + string job_name = 4; + } + + // Table data read event. + message TableDataRead { + // Describes how the table data was read. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Table was used as a source table during a BigQuery job. + JOB = 1; + + // Table data was accessed using the tabledata.list API. + TABLEDATA_LIST_REQUEST = 2; + + // Table data was accessed using the jobs.getQueryResults API. + GET_QUERY_RESULTS_REQUEST = 3; + + // Table data was accessed using the jobs.query RPC. + QUERY_REQUEST = 4; + + // Table data was accessed using storage.CreateReadSession API. + CREATE_READ_SESSION = 5; + + // Table data was accessed during a materialized view refresh. + MATERIALIZED_VIEW_REFRESH = 6; + } + + // List of the accessed fields. Entire list is truncated if the record size + // exceeds 100K. + repeated string fields = 2; + + // True if the fields list was truncated. + bool fields_truncated = 8; + + // List of the referenced policy tags. That is, policy tags attached to the + // accessed fields or their ancestors. + // Policy tag resource name is a string of the format: + // `projects//locations//taxonomies//policyTags/` + repeated string policy_tags = 9; + + // True if the policy tag list was truncated. At most 100 policy tags can be + // saved. + bool policy_tags_truncated = 10; + + // Describes how the table data was read. 
+ Reason reason = 3; + + // The URI of the job that read a table. + // Present if the reason is JOB but can be redacted for privacy reasons. + // + // Format: `projects//jobs/`. + string job_name = 4; + + // The URI of the read session that read a table. + // Present if the reason is CREATE_READ_SESSION. + // + // Format: + // `projects//locations//sessions/`. + string session_name = 5; + } + + // Table metadata change event. + message TableChange { + // Describes how the table metadata was changed. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Table metadata was updated using the tables.update or tables.patch API. + TABLE_UPDATE_REQUEST = 1; + + // Table was used as a job destination table. + JOB = 2; + + // Table metadata was updated using a DML or DDL query. + QUERY = 3; + } + + // Updated table metadata. + Table table = 1; + + // True if the table was truncated. + bool truncated = 4; + + // Describes how the table metadata was changed. + Reason reason = 5; + + // The URI of the job that changed a table. + // Present if the reason is JOB or QUERY. + // + // Format: `projects//jobs/`. + string job_name = 6; + } + + // Model metadata change event. + message ModelMetadataChange { + // Describes how the model metadata was changed. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Model metadata was updated using the models.patch API. + MODEL_PATCH_REQUEST = 1; + + // Model metadata was updated using a DDL query. + QUERY = 2; + } + + // Updated model. + Model model = 1; + + // Describes how the model metadata was changed. + Reason reason = 2; + + // The URI of the job that changed the model metadata. + // Present if and only if the reason is QUERY. + // + // Format: `projects//jobs/`. + string job_name = 3; + } + + // Routine change event. + message RoutineChange { + // Describes how the routine was updated. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Routine was updated using a DDL query. + QUERY = 1; + + // Routine was updated using the routines.update or routines.patch API. + ROUTINE_UPDATE_REQUEST = 2; + } + + // Updated routine. + Routine routine = 1; + + // Describes how the routine was updated. + Reason reason = 3; + + // The URI of the job that updated the routine. + // + // Format: `projects//jobs/`. + string job_name = 4; + } + + // Table data change event. + message TableDataChange { + // Describes how the table data was changed. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Table was used as a job destination table. + JOB = 1; + + // Table data was updated using a DML or DDL query. + QUERY = 2; + + // Table data was updated during a materialized view refresh. + MATERIALIZED_VIEW_REFRESH = 3; + + // Table data was added using the Write API. + WRITE_API = 4; + } + + // Number of deleted rows. + int64 deleted_rows_count = 1; + + // Number of inserted rows. + int64 inserted_rows_count = 2; + + // True if the table was truncated. + bool truncated = 3; + + // Describes how the table data was changed. + Reason reason = 4; + + // The URI of the job that changed a table. + // + // Format: `projects//jobs/`. + string job_name = 5; + + // If written from WRITE_API, the name of the stream. + // + // Format: + // `projects//datasets//tables//streams/` + string stream_name = 6; + } + + // Model data change event. + message ModelDataChange { + // Describes how the model data was changed. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Model data was changed using a DDL query. 
+ QUERY = 1; + } + + // Describes how the model data was changed. + Reason reason = 1; + + // The URI of the job that changed the model data. + // + // Format: `projects//jobs/`. + string job_name = 2; + } + + // Model data read event. + message ModelDataRead { + // Describes how the model data was read. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Model was used as a source model during a BigQuery job. + JOB = 1; + } + + // Describes how the model data was read. + Reason reason = 1; + + // The URI of the job that read the model data. + // + // Format: `projects//jobs/`. + string job_name = 2; + } + + // BigQuery dataset. + message Dataset { + // Dataset URI. + // + // Format: `projects//datasets/`. + string dataset_name = 1; + + // Dataset creation time. + google.protobuf.Timestamp create_time = 3; + + // Dataset metadata last update time. + google.protobuf.Timestamp update_time = 4; + + // The access control list for the dataset. + BigQueryAcl acl = 5; + + // Default expiration time for tables in the dataset. + google.protobuf.Duration default_table_expire_duration = 6; + + // User-provided metadata for the dataset. + EntityInfo dataset_info = 7; + + // Default encryption for tables in the dataset. + EncryptionInfo default_encryption = 8; + } + + // Table deletion event. + message TableDeletion { + // Describes how the table was deleted. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Table was deleted using the tables.delete API. + TABLE_DELETE_REQUEST = 2; + + // Table expired. + EXPIRED = 3; + + // Table deleted using a DDL query. + QUERY = 4; + } + + // Describes how the table was deleted. + Reason reason = 1; + + // The URI of the job that deleted a table. + // Present if the reason is QUERY. + // + // Format: `projects//jobs/`. + string job_name = 2; + } + + // Model deletion event. + message ModelDeletion { + // Describes how the model was deleted. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Model was deleted using the models.delete API. + MODEL_DELETE_REQUEST = 1; + + // Model expired. + EXPIRED = 2; + + // Model was deleted using DDL query. + QUERY = 3; + } + + // Describes how the model was deleted. + Reason reason = 1; + + // The URI of the job that deleted a model. + // Present if the reason is QUERY. + // + // Format: `projects//jobs/`. + string job_name = 2; + } + + // Trained BigQuery ML model. + message Model { + // Model URI. + // + // Format: `projects//datasets//models/`. + string model_name = 1; + + // User-provided metadata for the model. + EntityInfo model_info = 2; + + // Model expiration time. + google.protobuf.Timestamp expire_time = 5; + + // Model creation time. + google.protobuf.Timestamp create_time = 6; + + // Model last update time. + google.protobuf.Timestamp update_time = 7; + + // Model encryption information. Set when non-default encryption is used. + EncryptionInfo encryption = 8; + } + + // Routine deletion event. + message RoutineDeletion { + // Describes how the routine was deleted. + enum Reason { + // Unknown. + REASON_UNSPECIFIED = 0; + + // Routine was deleted using DDL query. + QUERY = 1; + + // Routine was deleted using the API. + ROUTINE_DELETE_REQUEST = 2; + } + + // Deleted routine. + Routine routine = 1; + + // Describes how the routine was deleted. + Reason reason = 3; + + // The URI of the job that deleted the routine. + // Present if the reason is QUERY. + // + // Format: `projects//jobs/`. + string job_name = 4; + } + + // BigQuery job. + message Job { + // Job URI. 
+ // + // Format: `projects//jobs/`. + string job_name = 1; + + // Job configuration. + JobConfig job_config = 2; + + // Job status. + JobStatus job_status = 3; + + // Job statistics. + JobStats job_stats = 4; + } + + // Job configuration. + // See the [Jobs](https://cloud.google.com/bigquery/docs/reference/v2/jobs) + // API resource for more details on individual fields. + message JobConfig { + // Query job configuration. + message Query { + // Priority given to the query. + enum Priority { + // Unknown. + PRIORITY_UNSPECIFIED = 0; + + // Interactive query. + QUERY_INTERACTIVE = 1; + + // Batch query. + QUERY_BATCH = 2; + } + + // The SQL query to run. Truncated if exceeds 50K. + string query = 1; + + // True if the query field was truncated. + bool query_truncated = 10; + + // The destination table for the query results. + string destination_table = 2; + + // Destination table create disposition. + CreateDisposition create_disposition = 3; + + // Destination table write disposition. + WriteDisposition write_disposition = 4; + + // Default dataset for the query. + string default_dataset = 5; + + // External data sources used in the query. + repeated TableDefinition table_definitions = 6; + + // Priority given to the query. + Priority priority = 7; + + // Result table encryption information. Set when non-default encryption is + // used. + EncryptionInfo destination_table_encryption = 8; + + // Type of the query. + QueryStatementType statement_type = 9; + } + + // Load job configuration. + message Load { + // URIs for the data to be imported. Entire list is truncated if exceeds + // 40K. + repeated string source_uris = 1; + + // True if the source_URIs field was truncated. + bool source_uris_truncated = 7; + + // The table schema in JSON format. Entire field is truncated if exceeds + // 40K. + string schema_json = 2; + + // True if the schema_json field was truncated. + bool schema_json_truncated = 8; + + // The destination table for the import. + string destination_table = 3; + + // Destination table create disposition. + CreateDisposition create_disposition = 4; + + // Destination table write disposition. + WriteDisposition write_disposition = 5; + + // Result table encryption information. Set when non-default encryption is + // used. + EncryptionInfo destination_table_encryption = 6; + } + + // Extract job configuration. + message Extract { + // URIs where extracted data should be written. Entire list is truncated + // if exceeds 50K. + repeated string destination_uris = 1; + + // True if the destination_URIs field was truncated. + bool destination_uris_truncated = 3; + + oneof source { + // The source table. + string source_table = 2; + + // The source model. + string source_model = 4; + } + } + + // Table copy job configuration. + message TableCopy { + // Source tables. Entire list is truncated if exceeds 50K. + repeated string source_tables = 1; + + // True if the source_tables field was truncated. + bool source_tables_truncated = 6; + + // Destination table. + string destination_table = 2; + + // Destination table create disposition. + CreateDisposition create_disposition = 3; + + // Destination table write disposition. + WriteDisposition write_disposition = 4; + + // Result table encryption information. Set when non-default encryption is + // used. + EncryptionInfo destination_table_encryption = 5; + + // Supported operation types in the table copy job. + OperationType operation_type = 7; + + // Expiration time set on the destination table. 
Expired tables will be + // deleted and their storage reclaimed. + google.protobuf.Timestamp destination_expiration_time = 8; + } + + // Job type. + enum Type { + // Unknown. + TYPE_UNSPECIFIED = 0; + + // Query job. + QUERY = 1; + + // Table copy job. + COPY = 2; + + // Export (extract) job. + EXPORT = 3; + + // Import (load) job. + IMPORT = 4; + } + + // Job type. + Type type = 1; + + // Job configuration information. + oneof config { + // Query job information. + Query query_config = 2; + + // Load job information. + Load load_config = 3; + + // Extract job information. + Extract extract_config = 4; + + // TableCopy job information. + TableCopy table_copy_config = 5; + } + + // Labels provided for the job. + map labels = 6; + } + + // Definition of an external data source used in a query. + message TableDefinition { + // Name of the table, used in queries. + string name = 1; + + // URIs for the data. + repeated string source_uris = 2; + } + + // Status of a job. + message JobStatus { + // State of the job. + JobState job_state = 1; + + // Job error, if the job failed. + google.rpc.Status error_result = 2; + + // Errors encountered during the running of the job. Does not necessarily + // mean that the job has completed or was unsuccessful. + repeated google.rpc.Status errors = 3; + } + + // Job statistics. + message JobStats { + // Query job statistics. + message Query { + // Total bytes processed by the query job. + int64 total_processed_bytes = 1; + + // Total bytes billed by the query job. + int64 total_billed_bytes = 2; + + // The tier assigned by the CPU-based billing. + int32 billing_tier = 3; + + // Tables accessed by the query job. + repeated string referenced_tables = 6; + + // Views accessed by the query job. + repeated string referenced_views = 7; + + // Routines accessed by the query job. + repeated string referenced_routines = 10; + + // Number of output rows produced by the query job. + int64 output_row_count = 8; + + // True if the query job results were read from the query cache. + bool cache_hit = 9; + } + + // Load job statistics. + message Load { + // Total bytes loaded by the import job. + int64 total_output_bytes = 1; + } + + // Extract job statistics. + message Extract { + // Total bytes exported by the extract job. + int64 total_input_bytes = 1; + } + + // Job resource usage breakdown by reservation. + message ReservationResourceUsage { + // Reservation name or "unreserved" for on-demand resources usage. + string name = 1; + + // Total slot milliseconds used by the reservation for a particular job. + int64 slot_ms = 2; + } + + // Job creation time. + google.protobuf.Timestamp create_time = 1; + + // Job execution start time. + google.protobuf.Timestamp start_time = 2; + + // Job completion time. + google.protobuf.Timestamp end_time = 3; + + // Statistics specific to the job type. + oneof extended { + // Query job statistics. + Query query_stats = 8; + + // Load job statistics. + Load load_stats = 9; + + // Extract job statistics. + Extract extract_stats = 13; + } + + // The total number of slot-ms consumed by the query job. + int64 total_slot_ms = 10; + + // Reservation usage attributed from each tier of a reservation hierarchy. + repeated ReservationResourceUsage reservation_usage = 11; + + // Parent job name. Only present for child jobs. + string parent_job_name = 12; + } + + // Describes whether a job should overwrite or append the existing destination + // table if it already exists. + enum WriteDisposition { + // Unknown. 
+ WRITE_DISPOSITION_UNSPECIFIED = 0; + + // This job should only be writing to empty tables. + WRITE_EMPTY = 1; + + // This job will truncate the existing table data. + WRITE_TRUNCATE = 2; + + // This job will append to the table. + WRITE_APPEND = 3; + } + + // BigQuery table. + message Table { + // Table URI. + // + // Format: `projects//datasets//tables/`. + string table_name = 1; + + // A JSON representation of the table's schema. Entire field is truncated + // if exceeds 40K. + string schema_json = 3; + + // True if the schema_json field was truncated. + bool schema_json_truncated = 11; + + // View metadata. Only present for views. + TableViewDefinition view = 4; + + // Table expiration time. + google.protobuf.Timestamp expire_time = 5; + + // The table creation time. + google.protobuf.Timestamp create_time = 6; + + // The last time metadata update time. + google.protobuf.Timestamp update_time = 7; + + // The last table truncation time. + google.protobuf.Timestamp truncate_time = 8; + + // Table encryption information. Set when non-default encryption is used. + EncryptionInfo encryption = 9; + + // User-provided metadata for the table. + EntityInfo table_info = 10; + } + + // User Defined Function (UDF) or Stored Procedure. + message Routine { + // Routine URI. + // + // Format: + // `projects//datasets//routines/`. + string routine_name = 1; + + // Routine creation time. + google.protobuf.Timestamp create_time = 5; + + // Routine last update time. + google.protobuf.Timestamp update_time = 6; + } + + // Table copy job operation type. + enum OperationType { + // Unspecified operation type. + OPERATION_TYPE_UNSPECIFIED = 0; + + // The source and the destination table have the same table type. + COPY = 1; + + // The source table type is TABLE and + // the destination table type is SNAPSHOT. + SNAPSHOT = 2; + + // The source table type is SNAPSHOT and + // the destination table type is TABLE. + RESTORE = 3; + } + + // User-provided metadata for an entity, for e.g. dataset, table or model. + message EntityInfo { + // A short name for the entity. + string friendly_name = 1; + + // A long description for the entity. + string description = 2; + + // Labels provided for the entity. + map labels = 3; + } + + // View definition. + message TableViewDefinition { + // SQL query defining the view. Truncated if exceeds 40K. + string query = 1; + + // True if the schema_json field was truncated. + bool query_truncated = 2; + } + + // An access control list. + message BigQueryAcl { + // IAM policy for the resource. + google.iam.v1.Policy policy = 1; + + // List of authorized views for a dataset. + // + // Format: `projects//datasets//tables/`. + repeated string authorized_views = 2; + } + + // Encryption properties for a table or a job + message EncryptionInfo { + // Cloud kms key identifier. + // + // Format: + // `projects//locations//keyRings//cryptoKeys/` + string kms_key_name = 1; + } + + // Describes whether a job should create a destination table if it doesn't + // exist. + enum CreateDisposition { + // Unknown. + CREATE_DISPOSITION_UNSPECIFIED = 0; + + // This job should never create tables. + CREATE_NEVER = 1; + + // This job should create a table if it doesn't already exist. + CREATE_IF_NEEDED = 2; + } + + // State of a job. + enum JobState { + // State unknown. + JOB_STATE_UNSPECIFIED = 0; + + // Job is waiting for the resources. + PENDING = 1; + + // Job is running. + RUNNING = 2; + + // Job is done. + DONE = 3; + } + + // Type of the statement (e.g. 
SELECT, INSERT, CREATE_TABLE, CREATE_MODEL..) + enum QueryStatementType { + // Unknown. + QUERY_STATEMENT_TYPE_UNSPECIFIED = 0; + + // SELECT ... FROM <Table list> ... + SELECT = 1; + + // ASSERT <condition> AS 'description' + ASSERT = 23; + + // INSERT INTO <Table> .... + INSERT = 2; + + // UPDATE <Table> SET ... + UPDATE = 3; + + // DELETE <Table> ... + DELETE = 4; + + // MERGE INTO <Table> .... + MERGE = 5; + + // CREATE TABLE <Table> <column list> + CREATE_TABLE = 6; + + // CREATE TABLE <Table> AS SELECT + CREATE_TABLE_AS_SELECT = 7; + + // CREATE VIEW <View> + CREATE_VIEW = 8; + + // CREATE MODEL <Model> AS <Query> + CREATE_MODEL = 9; + + // CREATE MATERIALIZED VIEW <View> AS ... + CREATE_MATERIALIZED_VIEW = 13; + + // CREATE FUNCTION <Function>(<Signature>) AS ... + CREATE_FUNCTION = 14; + + // CREATE PROCEDURE <Procedure> + CREATE_PROCEDURE = 20; + + // CREATE SCHEMA <Schema> + CREATE_SCHEMA = 53; + + // DROP TABLE <Table> + DROP_TABLE = 10; + + // DROP EXTERNAL TABLE <Table> + DROP_EXTERNAL_TABLE = 33; + + // DROP VIEW <View> + DROP_VIEW = 11; + + // DROP MODEL <Model> + DROP_MODEL = 12; + + // DROP MATERIALIZED VIEW <View> + DROP_MATERIALIZED_VIEW = 15; + + // DROP FUNCTION <Function> + DROP_FUNCTION = 16; + + // DROP PROCEDURE <Procedure> + DROP_PROCEDURE = 21; + + // DROP SCHEMA <Schema> + DROP_SCHEMA = 54; + + // ALTER TABLE <Table> + ALTER_TABLE = 17; + + // ALTER VIEW <View> + ALTER_VIEW = 18; + + // ALTER MATERIALIZED_VIEW <view> + ALTER_MATERIALIZED_VIEW = 22; + + // ALTER SCHEMA <Schema> + ALTER_SCHEMA = 55; + + // Script + SCRIPT = 19; + + // TRUNCATE TABLE <Table> + TRUNCATE_TABLE = 26; + + // CREATE EXTERNAL TABLE <TABLE> + CREATE_EXTERNAL_TABLE = 27; + + // EXPORT DATA; + EXPORT_DATA = 28; + + // CALL <stored procedure> + CALL = 29; + } + + // BigQuery event information. + oneof event { + // Job insertion event. + JobInsertion job_insertion = 1; + + // Job state change event. + JobChange job_change = 2; + + // Dataset creation event. + DatasetCreation dataset_creation = 3; + + // Dataset change event. + DatasetChange dataset_change = 4; + + // Dataset deletion event. + DatasetDeletion dataset_deletion = 5; + + // Table creation event. + TableCreation table_creation = 6; + + // Table metadata change event. + TableChange table_change = 8; + + // Table deletion event. + TableDeletion table_deletion = 9; + + // Table data read event. + TableDataRead table_data_read = 10; + + // Table data change event. + TableDataChange table_data_change = 11; + + // Model deletion event. + ModelDeletion model_deletion = 12; + + // Model creation event. + ModelCreation model_creation = 13; + + // Model metadata change event. + ModelMetadataChange model_metadata_change = 14; + + // Model data change event. + ModelDataChange model_data_change = 15; + + // Model data read event. + ModelDataRead model_data_read = 19; + + // Routine creation event. + RoutineCreation routine_creation = 16; + + // Routine change event. + RoutineChange routine_change = 17; + + // Routine deletion event. 
+    RoutineDeletion routine_deletion = 18;
+  }
+}
diff --git a/synth.metadata b/synth.metadata
index 32d0ffaf..18948cb0 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -4,15 +4,15 @@
       "git": {
         "name": ".",
         "remote": "https://github.com/googleapis/java-common-protos.git",
-        "sha": "4942688dbe02580ea0be48e9a9366d489bb3c9d6"
+        "sha": "66db02e9b97c724179a2110671a51bc2ce92b3df"
       }
     },
     {
       "git": {
         "name": "googleapis",
         "remote": "https://github.com/googleapis/googleapis.git",
-        "sha": "aa136fa6649f7d999bf3873d0e5fca75cb2354a5",
-        "internalRef": "382337225"
+        "sha": "ed739492993c4a99629b6430affdd6c0fb59d435",
+        "internalRef": "391764852"
       }
     },
     {
@@ -367,6 +367,7 @@
         "proto-google-common-protos/src/main/proto/google/api/usage.proto",
         "proto-google-common-protos/src/main/proto/google/api/visibility.proto",
         "proto-google-common-protos/src/main/proto/google/cloud/audit/audit_log.proto",
+        "proto-google-common-protos/src/main/proto/google/cloud/audit/bigquery_audit_metadata.proto",
         "proto-google-common-protos/src/main/proto/google/geo/type/viewport.proto",
         "proto-google-common-protos/src/main/proto/google/logging/type/http_request.proto",
         "proto-google-common-protos/src/main/proto/google/logging/type/log_severity.proto",
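
Note on consuming the new message (a minimal sketch, not part of the generated sources in this change): with java_multiple_files and java_package = com.google.cloud.audit set in the options above, this proto compiles to a top-level com.google.cloud.audit.BigQueryAuditMetadata class with nested event messages. The example assumes the caller has already extracted the protoPayload.metadata JSON object from a Cloud Logging LogEntry and has protobuf-java-util on the classpath for JsonFormat; the class name BigQueryAuditMetadataExample and the method describeEvent are illustrative only.

import com.google.cloud.audit.BigQueryAuditMetadata;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.util.JsonFormat;

/** Sketch: decode the metadata payload of a BigQuery cloud audit log entry. */
public final class BigQueryAuditMetadataExample {

  /** Parses the protoPayload.metadata JSON into the generated message and inspects the event oneof. */
  static void describeEvent(String metadataJson) throws InvalidProtocolBufferException {
    BigQueryAuditMetadata.Builder builder = BigQueryAuditMetadata.newBuilder();
    // ignoringUnknownFields() keeps parsing forward-compatible with fields
    // added to this proto after the client was built.
    JsonFormat.parser().ignoringUnknownFields().merge(metadataJson, builder);
    BigQueryAuditMetadata metadata = builder.build();

    // The top-level `event` oneof identifies which event the log entry describes.
    switch (metadata.getEventCase()) {
      case JOB_CHANGE:
        BigQueryAuditMetadata.JobChange jobChange = metadata.getJobChange();
        System.out.printf(
            "job %s is now in state %s%n",
            jobChange.getJob().getJobName(), jobChange.getAfter());
        break;
      case TABLE_DATA_READ:
        BigQueryAuditMetadata.TableDataRead read = metadata.getTableDataRead();
        System.out.printf(
            "table data read (%s), %d accessed fields recorded%n",
            read.getReason(), read.getFieldsCount());
        break;
      default:
        System.out.println("other event: " + metadata.getEventCase());
    }
  }

  private BigQueryAuditMetadataExample() {}
}

Parsing with ignoringUnknownFields() is deliberate: the event payloads gain fields and enum values over time (this revision alone introduces WRITE_API and MATERIALIZED_VIEW_REFRESH reasons), so an older client should not fail on a newer payload.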