Skip to content

Commit

Permalink
feat(bigquery): update the API
Browse files · Browse the repository at this point in the history
#### bigquery:v2

The following keys were added:
- schemas.TrainingOptions.properties.boosterType.description
- schemas.TrainingOptions.properties.boosterType.enum
- schemas.TrainingOptions.properties.boosterType.enumDescriptions
- schemas.TrainingOptions.properties.boosterType.type
- schemas.TrainingOptions.properties.colsampleBylevel.description
- schemas.TrainingOptions.properties.colsampleBylevel.format
- schemas.TrainingOptions.properties.colsampleBylevel.type
- schemas.TrainingOptions.properties.colsampleBynode.description
- schemas.TrainingOptions.properties.colsampleBynode.format
- schemas.TrainingOptions.properties.colsampleBynode.type
- schemas.TrainingOptions.properties.colsampleBytree.description
- schemas.TrainingOptions.properties.colsampleBytree.format
- schemas.TrainingOptions.properties.colsampleBytree.type
- schemas.TrainingOptions.properties.dartNormalizeType.description
- schemas.TrainingOptions.properties.dartNormalizeType.enum
- schemas.TrainingOptions.properties.dartNormalizeType.enumDescriptions
- schemas.TrainingOptions.properties.dartNormalizeType.type
- schemas.TrainingOptions.properties.minTreeChildWeight.description
- schemas.TrainingOptions.properties.minTreeChildWeight.format
- schemas.TrainingOptions.properties.minTreeChildWeight.type
- schemas.TrainingOptions.properties.numParallelTree.description
- schemas.TrainingOptions.properties.numParallelTree.format
- schemas.TrainingOptions.properties.numParallelTree.type
- schemas.TrainingOptions.properties.treeMethod.description
- schemas.TrainingOptions.properties.treeMethod.enum
- schemas.TrainingOptions.properties.treeMethod.enumDescriptions
- schemas.TrainingOptions.properties.treeMethod.type

The following keys were changed:
- resources.jobs.methods.delete.description
- resources.jobs.methods.delete.parameters.jobId.description
- resources.jobs.methods.delete.parameters.projectId.description
- schemas.ArimaForecastingMetrics.properties.seasonalPeriods.items.enumDescriptions
- schemas.ArimaModelInfo.properties.seasonalPeriods.items.enumDescriptions
- schemas.ArimaResult.properties.seasonalPeriods.items.enumDescriptions
- schemas.ArimaSingleModelForecastingMetrics.properties.seasonalPeriods.items.enumDescriptions
  • Loading branch information
yoshi-automation authored and JustinBeckwith committed Sep 28, 2021
1 parent 087c8de commit e89a3f3
Show file tree / Hide file tree
Showing 2 changed files with 116 additions and 13 deletions.
87 changes: 79 additions & 8 deletions discovery/bigquery-v2.json
Expand Up @@ -338,7 +338,7 @@
]
},
"delete": {
"description": "Requests that a job is deleted. This call will return when the job is deleted. This method is available in limited preview.",
"description": "Requests the deletion of the metadata of a job. This call returns when the job's metadata is deleted.",
"flatPath": "projects/{projectsId}/jobs/{jobsId}/delete",
"httpMethod": "DELETE",
"id": "bigquery.jobs.delete",
Expand All @@ -348,7 +348,7 @@
],
"parameters": {
"jobId": {
"description": "Required. Job ID of the job to be deleted. If this is a parent job which has child jobs, all child jobs will be deleted as well. Deletion of child jobs directly is not allowed.",
"description": "Required. Job ID of the job for which metadata is to be deleted. If this is a parent job which has child jobs, the metadata from all child jobs will be deleted as well. Direct deletion of the metadata of child jobs is not allowed.",
"location": "path",
"pattern": "^[^/]+$",
"required": true,
Expand All @@ -360,7 +360,7 @@
"type": "string"
},
"projectId": {
"description": "Required. Project ID of the job to be deleted.",
"description": "Required. Project ID of the job for which metadata is to be deleted.",
"location": "path",
"pattern": "^[^/]+$",
"required": true,
Expand Down Expand Up @@ -1683,7 +1683,7 @@
}
}
},
"revision": "20210904",
"revision": "20210919",
"rootUrl": "https://bigquery.googleapis.com/",
"schemas": {
"AggregateClassificationMetrics": {
Expand Down Expand Up @@ -1868,7 +1868,7 @@
"YEARLY"
],
"enumDescriptions": [
"",
"Unspecified seasonal period.",
"No seasonality",
"Daily period, 24 hours.",
"Weekly period, 7 days.",
Expand Down Expand Up @@ -1935,7 +1935,7 @@
"YEARLY"
],
"enumDescriptions": [
"",
"Unspecified seasonal period.",
"No seasonality",
"Daily period, 24 hours.",
"Weekly period, 7 days.",
Expand Down Expand Up @@ -2007,7 +2007,7 @@
"YEARLY"
],
"enumDescriptions": [
"",
"Unspecified seasonal period.",
"No seasonality",
"Daily period, 24 hours.",
"Weekly period, 7 days.",
Expand Down Expand Up @@ -2063,7 +2063,7 @@
"YEARLY"
],
"enumDescriptions": [
"",
"Unspecified seasonal period.",
"No seasonality",
"Daily period, 24 hours.",
"Weekly period, 7 days.",
Expand Down Expand Up @@ -6087,10 +6087,53 @@
"format": "int64",
"type": "string"
},
"boosterType": {
"description": "Booster type for boosted tree models.",
"enum": [
"BOOSTER_TYPE_UNSPECIFIED",
"GBTREE",
"DART"
],
"enumDescriptions": [
"Unspecified booster type.",
"Gbtree booster.",
"Dart booster."
],
"type": "string"
},
"cleanSpikesAndDips": {
"description": "If true, clean spikes and dips in the input time series.",
"type": "boolean"
},
"colsampleBylevel": {
"description": "Subsample ratio of columns for each level for boosted tree models.",
"format": "double",
"type": "number"
},
"colsampleBynode": {
"description": "Subsample ratio of columns for each node(split) for boosted tree models.",
"format": "double",
"type": "number"
},
"colsampleBytree": {
"description": "Subsample ratio of columns when constructing each tree for boosted tree models.",
"format": "double",
"type": "number"
},
"dartNormalizeType": {
"description": "Type of normalization algorithm for boosted tree models using dart booster.",
"enum": [
"DART_NORMALIZE_TYPE_UNSPECIFIED",
"TREE",
"FOREST"
],
"enumDescriptions": [
"Unspecified dart normalize type.",
"New trees have the same weight of each of dropped trees.",
"New trees have the same weight of sum of dropped trees."
],
"type": "string"
},
"dataFrequency": {
"description": "The data frequency of a time series.",
"enum": [
Expand Down Expand Up @@ -6457,6 +6500,11 @@
"format": "double",
"type": "number"
},
"minTreeChildWeight": {
"description": "Minimum sum of instance weight needed in a child for boosted tree models.",
"format": "int64",
"type": "string"
},
"modelUri": {
"description": "Google Cloud Storage URI from which the model was imported. Only applicable for imported models.",
"type": "string"
Expand All @@ -6475,6 +6523,11 @@
"format": "int64",
"type": "string"
},
"numParallelTree": {
"description": "Number of parallel trees constructed during each iteration for boosted tree models.",
"format": "int64",
"type": "string"
},
"optimizationStrategy": {
"description": "Optimization strategy for training linear regression models.",
"enum": [
Expand Down Expand Up @@ -6517,6 +6570,24 @@
"description": "Column to be designated as time series timestamp for ARIMA model.",
"type": "string"
},
"treeMethod": {
"description": "Tree construction algorithm for boosted tree models.",
"enum": [
"TREE_METHOD_UNSPECIFIED",
"AUTO",
"EXACT",
"APPROX",
"HIST"
],
"enumDescriptions": [
"Unspecified tree method.",
"Use heuristic to choose the fastest method.",
"Exact greedy algorithm.",
"Approximate greedy algorithm using quantile sketch and gradient histogram.",
"Fast histogram optimized approximate greedy algorithm."
],
"type": "string"
},
"userColumn": {
"description": "User column specified for matrix factorization models.",
"type": "string"
Expand Down
42 changes: 37 additions & 5 deletions src/apis/bigquery/v2.ts
Expand Up @@ -3061,10 +3061,30 @@ export namespace bigquery_v2 {
* Batch size for dnn models.
*/
batchSize?: string | null;
/**
* Booster type for boosted tree models.
*/
boosterType?: string | null;
/**
* If true, clean spikes and dips in the input time series.
*/
cleanSpikesAndDips?: boolean | null;
/**
* Subsample ratio of columns for each level for boosted tree models.
*/
colsampleBylevel?: number | null;
/**
* Subsample ratio of columns for each node(split) for boosted tree models.
*/
colsampleBynode?: number | null;
/**
* Subsample ratio of columns when constructing each tree for boosted tree models.
*/
colsampleBytree?: number | null;
/**
* Type of normalization algorithm for boosted tree models using dart booster.
*/
dartNormalizeType?: string | null;
/**
* The data frequency of a time series.
*/
Expand Down Expand Up @@ -3177,6 +3197,10 @@ export namespace bigquery_v2 {
* Minimum split loss for boosted tree models.
*/
minSplitLoss?: number | null;
/**
* Minimum sum of instance weight needed in a child for boosted tree models.
*/
minTreeChildWeight?: string | null;
/**
* Google Cloud Storage URI from which the model was imported. Only applicable for imported models.
*/
Expand All @@ -3193,6 +3217,10 @@ export namespace bigquery_v2 {
* Num factors specified for matrix factorization models.
*/
numFactors?: string | null;
/**
* Number of parallel trees constructed during each iteration for boosted tree models.
*/
numParallelTree?: string | null;
/**
* Optimization strategy for training linear regression models.
*/
Expand Down Expand Up @@ -3221,6 +3249,10 @@ export namespace bigquery_v2 {
* Column to be designated as time series timestamp for ARIMA model.
*/
timeSeriesTimestampColumn?: string | null;
/**
* Tree construction algorithm for boosted tree models.
*/
treeMethod?: string | null;
/**
* User column specified for matrix factorization models.
*/
Expand Down Expand Up @@ -4476,7 +4508,7 @@ export namespace bigquery_v2 {
}

/**
* Requests that a job is deleted. This call will return when the job is deleted. This method is available in limited preview.
* Requests the deletion of the metadata of a job. This call returns when the job's metadata is deleted.
* @example
* ```js
* // Before running the sample:
Expand Down Expand Up @@ -4505,11 +4537,11 @@ export namespace bigquery_v2 {
*
* // Do the magic
* const res = await bigquery.jobs.delete({
* // Required. Job ID of the job to be deleted. If this is a parent job which has child jobs, all child jobs will be deleted as well. Deletion of child jobs directly is not allowed.
* // Required. Job ID of the job for which metadata is to be deleted. If this is a parent job which has child jobs, the metadata from all child jobs will be deleted as well. Direct deletion of the metadata of child jobs is not allowed.
* jobId: '[^/]+',
* // The geographic location of the job. Required. See details at: https://cloud.google.com/bigquery/docs/locations#specifying_your_location.
* location: 'placeholder-value',
* // Required. Project ID of the job to be deleted.
* // Required. Project ID of the job for which metadata is to be deleted.
* projectId: '[^/]+',
* });
* console.log(res.data);
Expand Down Expand Up @@ -5408,15 +5440,15 @@ export namespace bigquery_v2 {
}
export interface Params$Resource$Jobs$Delete extends StandardParameters {
/**
* Required. Job ID of the job to be deleted. If this is a parent job which has child jobs, all child jobs will be deleted as well. Deletion of child jobs directly is not allowed.
* Required. Job ID of the job for which metadata is to be deleted. If this is a parent job which has child jobs, the metadata from all child jobs will be deleted as well. Direct deletion of the metadata of child jobs is not allowed.
*/
jobId?: string;
/**
* The geographic location of the job. Required. See details at: https://cloud.google.com/bigquery/docs/locations#specifying_your_location.
*/
location?: string;
/**
* Required. Project ID of the job to be deleted.
* Required. Project ID of the job for which metadata is to be deleted.
*/
projectId?: string;
}
Expand Down

0 comments on commit e89a3f3

Please sign in to comment.