feat(dataproc): update the API
#### dataproc:v1
The following keys were added:
- schemas.ClusterConfig.properties.endpointConfig.$ref
- schemas.ClusterConfig.properties.endpointConfig.description
- schemas.ClusterConfig.properties.tempBucket.description
- schemas.ClusterConfig.properties.tempBucket.type
- schemas.EndpointConfig.description
- schemas.EndpointConfig.id
- schemas.EndpointConfig.properties.enableHttpPortAccess.description
- schemas.EndpointConfig.properties.enableHttpPortAccess.type
- schemas.EndpointConfig.properties.httpPorts.additionalProperties.type
- schemas.EndpointConfig.properties.httpPorts.description
- schemas.EndpointConfig.properties.httpPorts.type
- schemas.EndpointConfig.type

The following keys were changed:
- schemas.JobReference.properties.projectId.description
- schemas.SoftwareConfig.properties.optionalComponents.enumDescriptions
- schemas.WorkflowTemplate.properties.parameters.description
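
The v1 additions all land on ClusterConfig (`endpointConfig`, `tempBucket`) and the new EndpointConfig schema. The sketch below is a minimal, hypothetical use of the regenerated v1 client; it is not part of this commit, and the project ID, region, cluster name, and bucket name are placeholders:

```ts
import {google} from 'googleapis';

async function createClusterWithEndpoints(): Promise<void> {
  const auth = new google.auth.GoogleAuth({
    scopes: ['https://www.googleapis.com/auth/cloud-platform'],
  });
  const dataproc = google.dataproc({version: 'v1', auth: await auth.getClient()});

  // clusters.create returns a long-running Operation, not the cluster itself.
  const res = await dataproc.projects.regions.clusters.create({
    projectId: 'my-project', // placeholder
    region: 'us-central1',   // placeholder
    requestBody: {
      projectId: 'my-project',
      clusterName: 'example-cluster',
      config: {
        // Added in this revision: enable HTTP access to component web UIs.
        endpointConfig: {enableHttpPortAccess: true},
        // Added in this revision: bucket for ephemeral cluster/job data. If
        // omitted, Dataproc manages a per-location bucket with a 90-day TTL.
        tempBucket: 'my-temp-bucket',
      },
    },
  });
  console.log(res.data); // Operation metadata

  // Once the cluster is running, config.endpointConfig.httpPorts (output
  // only) maps port descriptions to URLs.
}
```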

#### dataproc:v1beta2
The following keys were added:
- schemas.ClusterConfig.properties.tempBucket.description
- schemas.ClusterConfig.properties.tempBucket.type
- schemas.WorkflowMetadata.properties.dagEndTime.description
- schemas.WorkflowMetadata.properties.dagEndTime.format
- schemas.WorkflowMetadata.properties.dagEndTime.type
- schemas.WorkflowMetadata.properties.dagStartTime.description
- schemas.WorkflowMetadata.properties.dagStartTime.format
- schemas.WorkflowMetadata.properties.dagStartTime.type
- schemas.WorkflowMetadata.properties.dagTimeout.description
- schemas.WorkflowMetadata.properties.dagTimeout.format
- schemas.WorkflowMetadata.properties.dagTimeout.type
- schemas.WorkflowTemplate.properties.dagTimeout.description
- schemas.WorkflowTemplate.properties.dagTimeout.format
- schemas.WorkflowTemplate.properties.dagTimeout.type

The following keys were changed:
- schemas.JobReference.properties.projectId.description
- schemas.SoftwareConfig.properties.optionalComponents.enumDescriptions
- schemas.WorkflowTemplate.description
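
In v1beta2, `dagTimeout` is a google-duration string (minimum 10 minutes, maximum 24 hours), and `dagStartTime`/`dagEndTime` are output-only WorkflowMetadata fields set only for workflows that specify a timeout. A minimal, hypothetical sketch against the regenerated v1beta2 client (the parent, template ID, cluster name, and job are placeholders):

```ts
import {google} from 'googleapis';

async function createTemplateWithDagTimeout(): Promise<void> {
  const auth = new google.auth.GoogleAuth({
    scopes: ['https://www.googleapis.com/auth/cloud-platform'],
  });
  const dataproc = google.dataproc({
    version: 'v1beta2',
    auth: await auth.getClient(),
  });

  await dataproc.projects.regions.workflowTemplates.create({
    parent: 'projects/my-project/regions/us-central1', // placeholder
    requestBody: {
      id: 'nightly-etl', // placeholder
      // Added in this revision: cancel the whole DAG of jobs after 30
      // minutes. The timer begins when the first job is submitted.
      dagTimeout: '1800s',
      placement: {
        managedCluster: {clusterName: 'etl-cluster', config: {}},
      },
      jobs: [
        {
          stepId: 'spark-pi',
          sparkJob: {
            mainClass: 'org.apache.spark.examples.SparkPi',
            jarFileUris: [
              'file:///usr/lib/spark/examples/jars/spark-examples.jar',
            ],
          },
        },
      ],
    },
  });
}
```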
yoshi-automation authored and JustinBeckwith committed Jul 10, 2020
1 parent 00012b6 commit 2ff7db7
Showing 4 changed files with 115 additions and 12 deletions.
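
Both versions also relax JobReference.projectId from Required to Optional; if set, it must match the project ID in the request. A hypothetical v1 job submission that simply omits it (all IDs below are placeholders):

```ts
import {google} from 'googleapis';

async function submitSparkJob(): Promise<void> {
  const auth = new google.auth.GoogleAuth({
    scopes: ['https://www.googleapis.com/auth/cloud-platform'],
  });
  const dataproc = google.dataproc({version: 'v1', auth: await auth.getClient()});

  await dataproc.projects.regions.jobs.submit({
    projectId: 'my-project', // placeholder
    region: 'us-central1',   // placeholder
    requestBody: {
      job: {
        // reference.projectId is now optional; the service falls back to
        // the request's project ID. If supplied, the two must match.
        reference: {jobId: 'spark-pi-0001'},
        placement: {clusterName: 'example-cluster'},
        sparkJob: {
          mainClass: 'org.apache.spark.examples.SparkPi',
          jarFileUris: [
            'file:///usr/lib/spark/examples/jars/spark-examples.jar',
          ],
          args: ['1000'],
        },
      },
    },
  });
}
```
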
34 changes: 30 additions & 4 deletions discovery/dataproc-v1.json
@@ -2129,7 +2129,7 @@
}
}
},
"revision": "20200528",
"revision": "20200619",
"rootUrl": "https://dataproc.googleapis.com/",
"schemas": {
"AcceleratorConfig": {
@@ -2322,6 +2322,10 @@
"$ref": "EncryptionConfig",
"description": "Optional. Encryption settings for the cluster."
},
"endpointConfig": {
"$ref": "EndpointConfig",
"description": "Optional. Port/endpoint configuration for this cluster"
},
"gceClusterConfig": {
"$ref": "GceClusterConfig",
"description": "Optional. The shared Compute Engine config settings for all instances in a cluster."
@@ -2353,6 +2357,10 @@
"$ref": "SoftwareConfig",
"description": "Optional. The config settings for software inside the cluster."
},
"tempBucket": {
"description": "Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.",
"type": "string"
},
"workerConfig": {
"$ref": "InstanceGroupConfig",
"description": "Optional. The Compute Engine config settings for worker instances in a cluster."
@@ -2609,6 +2617,24 @@
},
"type": "object"
},
"EndpointConfig": {
"description": "Endpoint config for this cluster",
"id": "EndpointConfig",
"properties": {
"enableHttpPortAccess": {
"description": "Optional. If true, enable http access to specific ports on the cluster from external sources. Defaults to false.",
"type": "boolean"
},
"httpPorts": {
"additionalProperties": {
"type": "string"
},
"description": "Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true.",
"type": "object"
}
},
"type": "object"
},
"Expr": {
"description": "Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec.Example (Comparison):\ntitle: \"Summary size limit\"\ndescription: \"Determines if a summary is less than 100 chars\"\nexpression: \"document.summary.size() < 100\"\nExample (Equality):\ntitle: \"Requestor is owner\"\ndescription: \"Determines if requestor is the document owner\"\nexpression: \"document.owner == request.auth.claims.email\"\nExample (Logic):\ntitle: \"Public documents\"\ndescription: \"Determine whether the document should be publicly visible\"\nexpression: \"document.type != 'private' && document.type != 'internal'\"\nExample (Data Manipulation):\ntitle: \"Notification string\"\ndescription: \"Create a notification string with a timestamp.\"\nexpression: \"'New message received at ' + string(document.create_time)\"\nThe exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.",
"id": "Expr",
@@ -3050,7 +3076,7 @@
"type": "string"
},
"projectId": {
"description": "Required. The ID of the Google Cloud Platform project that the job belongs to.",
"description": "Optional. The ID of the Google Cloud Platform project that the job belongs to. If specified, must match the request project ID.",
"type": "string"
}
},
@@ -3763,7 +3789,7 @@
"optionalComponents": {
"description": "Optional. The set of components to activate on the cluster.",
"enumDescriptions": [
"Unspecified component.",
"Unspecified component. Specifying this will cause Cluster creation to fail.",
"The Anaconda python distribution.",
"The Hive Web HCatalog (the REST service for accessing HCatalog).",
"The Jupyter Notebook.",
@@ -4202,7 +4228,7 @@
"type": "string"
},
"parameters": {
"description": "Optional. emplate parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.",
"description": "Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.",
"items": {
"$ref": "TemplateParameter"
},
32 changes: 28 additions & 4 deletions discovery/dataproc-v1beta2.json
@@ -2242,7 +2242,7 @@
}
}
},
"revision": "20200528",
"revision": "20200619",
"rootUrl": "https://dataproc.googleapis.com/",
"schemas": {
"AcceleratorConfig": {
@@ -2474,6 +2474,10 @@
"$ref": "SoftwareConfig",
"description": "Optional. The config settings for software inside the cluster."
},
"tempBucket": {
"description": "Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.",
"type": "string"
},
"workerConfig": {
"$ref": "InstanceGroupConfig",
"description": "Optional. The Compute Engine config settings for worker instances in a cluster."
@@ -3214,7 +3218,7 @@
"type": "string"
},
"projectId": {
"description": "Required. The ID of the Google Cloud Platform project that the job belongs to.",
"description": "Optional. The ID of the Google Cloud Platform project that the job belongs to. If specified, must match the request project ID.",
"type": "string"
}
},
@@ -3942,7 +3946,7 @@
"optionalComponents": {
"description": "The set of optional components to activate on the cluster.",
"enumDescriptions": [
"Unspecified component.",
"Unspecified component. Specifying this will cause Cluster creation to fail.",
"The Anaconda python distribution.",
"The Druid query engine.",
"HBase.",
@@ -4291,6 +4295,21 @@
"$ref": "ClusterOperation",
"description": "Output only. The create cluster operation metadata."
},
"dagEndTime": {
"description": "Output only. DAG end time, only set for workflows with dag_timeout when DAG ends.",
"format": "google-datetime",
"type": "string"
},
"dagStartTime": {
"description": "Output only. DAG start time, only set for workflows with dag_timeout when DAG begins.",
"format": "google-datetime",
"type": "string"
},
"dagTimeout": {
"description": "Output only. The timeout duration for the DAG of jobs. Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed as a google.protobuf.Duration. For example, \"1800\" = 1800 seconds/30 minutes duration.",
"format": "google-duration",
"type": "string"
},
"deleteCluster": {
"$ref": "ClusterOperation",
"description": "Output only. The delete cluster operation metadata."
@@ -4391,14 +4410,19 @@
"type": "object"
},
"WorkflowTemplate": {
"description": "A Dataproc workflow template resource. Next ID: 11",
"description": "A Dataproc workflow template resource.",
"id": "WorkflowTemplate",
"properties": {
"createTime": {
"description": "Output only. The time template was created.",
"format": "google-datetime",
"type": "string"
},
"dagTimeout": {
"description": "Optional. Timeout for DAG of jobs. The timer begins when the first job is submitted. Minimum duration of 10 minutes, max of 24 hours.",
"format": "google-duration",
"type": "string"
},
"id": {
"description": "Required. The template id.The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters..",
"type": "string"
25 changes: 23 additions & 2 deletions src/apis/dataproc/v1.ts
@@ -283,6 +283,10 @@ export namespace dataproc_v1 {
* Optional. Encryption settings for the cluster.
*/
encryptionConfig?: Schema$EncryptionConfig;
/**
* Optional. Port/endpoint configuration for this cluster
*/
endpointConfig?: Schema$EndpointConfig;
/**
* Optional. The shared Compute Engine config settings for all instances in a cluster.
*/
@@ -311,6 +315,10 @@
* Optional. The config settings for software inside the cluster.
*/
softwareConfig?: Schema$SoftwareConfig;
/**
* Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster&#39;s temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.
*/
tempBucket?: string | null;
/**
* Optional. The Compute Engine config settings for worker instances in a cluster.
*/
@@ -481,6 +489,19 @@
*/
gcePdKmsKeyName?: string | null;
}
/**
* Endpoint config for this cluster
*/
export interface Schema$EndpointConfig {
/**
* Optional. If true, enable http access to specific ports on the cluster from external sources. Defaults to false.
*/
enableHttpPortAccess?: boolean | null;
/**
* Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true.
*/
httpPorts?: {[key: string]: string} | null;
}
/**
* Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec.Example (Comparison): title: &quot;Summary size limit&quot; description: &quot;Determines if a summary is less than 100 chars&quot; expression: &quot;document.summary.size() &lt; 100&quot; Example (Equality): title: &quot;Requestor is owner&quot; description: &quot;Determines if requestor is the document owner&quot; expression: &quot;document.owner == request.auth.claims.email&quot; Example (Logic): title: &quot;Public documents&quot; description: &quot;Determine whether the document should be publicly visible&quot; expression: &quot;document.type != &#39;private&#39; &amp;&amp; document.type != &#39;internal&#39;&quot; Example (Data Manipulation): title: &quot;Notification string&quot; description: &quot;Create a notification string with a timestamp.&quot; expression: &quot;&#39;New message received at &#39; + string(document.create_time)&quot; The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.
*/
@@ -830,7 +851,7 @@
*/
jobId?: string | null;
/**
- * Required. The ID of the Google Cloud Platform project that the job belongs to.
+ * Optional. The ID of the Google Cloud Platform project that the job belongs to. If specified, must match the request project ID.
*/
projectId?: string | null;
}
@@ -1626,7 +1647,7 @@
*/
name?: string | null;
/**
- * Optional. emplate parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
+ * Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
*/
parameters?: Schema$TemplateParameter[];
/**
36 changes: 34 additions & 2 deletions src/apis/dataproc/v1beta2.ts
@@ -319,6 +319,10 @@ export namespace dataproc_v1beta2 {
* Optional. The config settings for software inside the cluster.
*/
softwareConfig?: Schema$SoftwareConfig;
/**
* Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster&#39;s temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.
*/
tempBucket?: string | null;
/**
* Optional. The Compute Engine config settings for worker instances in a cluster.
*/
@@ -868,7 +872,7 @@
*/
jobId?: string | null;
/**
- * Required. The ID of the Google Cloud Platform project that the job belongs to.
+ * Optional. The ID of the Google Cloud Platform project that the job belongs to. If specified, must match the request project ID.
*/
projectId?: string | null;
}
@@ -1623,6 +1627,18 @@
* Output only. The create cluster operation metadata.
*/
createCluster?: Schema$ClusterOperation;
/**
* Output only. DAG end time, only set for workflows with dag_timeout when DAG ends.
*/
dagEndTime?: string | null;
/**
* Output only. DAG start time, only set for workflows with dag_timeout when DAG begins.
*/
dagStartTime?: string | null;
/**
* Output only. The timeout duration for the DAG of jobs. Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed as a google.protobuf.Duration. For example, &quot;1800&quot; = 1800 seconds/30 minutes duration.
*/
dagTimeout?: string | null;
/**
* Output only. The delete cluster operation metadata.
*/
@@ -1682,13 +1698,17 @@
stepId?: string | null;
}
/**
- * A Dataproc workflow template resource. Next ID: 11
+ * A Dataproc workflow template resource.
*/
export interface Schema$WorkflowTemplate {
/**
* Output only. The time template was created.
*/
createTime?: string | null;
/**
* Optional. Timeout for DAG of jobs. The timer begins when the first job is submitted. Minimum duration of 10 minutes, max of 24 hours.
*/
dagTimeout?: string | null;
/**
* Required. The template id.The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters..
*/
@@ -3076,6 +3096,7 @@
* // request body parameters
* // {
* // "createTime": "my_createTime",
* // "dagTimeout": "my_dagTimeout",
* // "id": "my_id",
* // "jobs": [],
* // "labels": {},
@@ -3092,6 +3113,7 @@
* // Example response
* // {
* // "createTime": "my_createTime",
* // "dagTimeout": "my_dagTimeout",
* // "id": "my_id",
* // "jobs": [],
* // "labels": {},
@@ -3369,6 +3391,7 @@
* // Example response
* // {
* // "createTime": "my_createTime",
* // "dagTimeout": "my_dagTimeout",
* // "id": "my_id",
* // "jobs": [],
* // "labels": {},
@@ -3800,6 +3823,7 @@
* // request body parameters
* // {
* // "createTime": "my_createTime",
* // "dagTimeout": "my_dagTimeout",
* // "id": "my_id",
* // "jobs": [],
* // "labels": {},
@@ -4399,6 +4423,7 @@
* // request body parameters
* // {
* // "createTime": "my_createTime",
* // "dagTimeout": "my_dagTimeout",
* // "id": "my_id",
* // "jobs": [],
* // "labels": {},
@@ -4415,6 +4440,7 @@
* // Example response
* // {
* // "createTime": "my_createTime",
* // "dagTimeout": "my_dagTimeout",
* // "id": "my_id",
* // "jobs": [],
* // "labels": {},
@@ -10603,6 +10629,7 @@
* // request body parameters
* // {
* // "createTime": "my_createTime",
* // "dagTimeout": "my_dagTimeout",
* // "id": "my_id",
* // "jobs": [],
* // "labels": {},
@@ -10619,6 +10646,7 @@
* // Example response
* // {
* // "createTime": "my_createTime",
* // "dagTimeout": "my_dagTimeout",
* // "id": "my_id",
* // "jobs": [],
* // "labels": {},
@@ -10896,6 +10924,7 @@
* // Example response
* // {
* // "createTime": "my_createTime",
* // "dagTimeout": "my_dagTimeout",
* // "id": "my_id",
* // "jobs": [],
* // "labels": {},
@@ -11327,6 +11356,7 @@
* // request body parameters
* // {
* // "createTime": "my_createTime",
* // "dagTimeout": "my_dagTimeout",
* // "id": "my_id",
* // "jobs": [],
* // "labels": {},
@@ -11926,6 +11956,7 @@
* // request body parameters
* // {
* // "createTime": "my_createTime",
* // "dagTimeout": "my_dagTimeout",
* // "id": "my_id",
* // "jobs": [],
* // "labels": {},
@@ -11942,6 +11973,7 @@
* // Example response
* // {
* // "createTime": "my_createTime",
* // "dagTimeout": "my_dagTimeout",
* // "id": "my_id",
* // "jobs": [],
* // "labels": {},
