Skip to content
This repository has been archived by the owner on Dec 31, 2023. It is now read-only.

Commit

Permalink
feat: Switch to string enums for compute (#685) (#158)
Browse files Browse the repository at this point in the history
* feat: Switch to string enums for compute (#685)

Also introduce gapic_yaml for java clients to override default LRO configuration (which has way too long timeouts).

Also cleanup and regenerate other files (grpc_service_config and service yaml)
Source-Link: googleapis/googleapis@8ce4ea6

Source-Link: googleapis/googleapis-gen@55e242c
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTVlMjQyYzdjZjVlNjAwZmFiYjhkNzY3YmQwNmY0ZmRmYWQ2YTAxNSJ9

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* run blacken on all directories with a noxfile

* chore: Update integration tests to reflect API changes

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* chore: Update integration test to reflect API changes

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
Co-authored-by: Anthonios Partheniou <partheniou@google.com>
  • Loading branch information
3 people committed Nov 29, 2021
1 parent 34092b5 commit 6cfa01e
Show file tree
Hide file tree
Showing 72 changed files with 1,227 additions and 1,196 deletions.
1,522 changes: 783 additions & 739 deletions google/cloud/compute_v1/types/compute.py

Large diffs are not rendered by default.

6 changes: 4 additions & 2 deletions owlbot.py
Expand Up @@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from pathlib import Path
import synthtool as s
import synthtool.gcp as gcp
from synthtool.languages import python
Expand Down Expand Up @@ -57,7 +58,8 @@
python.py_samples(skip_readmes=True)

# ----------------------------------------------------------------------------
# Run blacken session
# Run blacken session for all directories with a noxfile
# ----------------------------------------------------------------------------

s.shell.run(["nox", "-s", "blacken"], hide_output=False)
for noxfile in Path(".").glob("**/noxfile.py"):
s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False)
2 changes: 1 addition & 1 deletion samples/snippets/noxfile_config.py
Expand Up @@ -14,5 +14,5 @@

TEST_CONFIG_OVERRIDE = {
# Tests in test_sample_default_values.py require separate projects to not interfere with each other.
'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
"gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
}
16 changes: 14 additions & 2 deletions samples/snippets/quickstart.py
Expand Up @@ -56,6 +56,8 @@ def list_instances(project_id: str, zone: str) -> typing.Iterable[compute_v1.Ins
print(f" - {instance.name} ({instance.machine_type})")

return instance_list


# [END compute_instances_list]


Expand All @@ -74,7 +76,9 @@ def list_all_instances(
"""
instance_client = compute_v1.InstancesClient()
# Use the `max_results` parameter to limit the number of results that the API returns per response page.
request = compute_v1.AggregatedListInstancesRequest(project=project_id, max_results=5)
request = compute_v1.AggregatedListInstancesRequest(
project=project_id, max_results=5
)
agg_list = instance_client.aggregated_list(request=request)
all_instances = {}
print("Instances found:")
Expand All @@ -88,6 +92,8 @@ def list_all_instances(
for instance in response.instances:
print(f" - {instance.name} ({instance.machine_type})")
return all_instances


# [END compute_instances_list_all]


Expand Down Expand Up @@ -133,7 +139,7 @@ def create_instance(
disk.initialize_params = initialize_params
disk.auto_delete = True
disk.boot = True
disk.type_ = compute_v1.AttachedDisk.Type.PERSISTENT
disk.type_ = "PERSISTENT"

# Use the network interface provided in the network_name argument.
network_interface = compute_v1.NetworkInterface()
Expand Down Expand Up @@ -166,6 +172,8 @@ def create_instance(
print("Warning during creation:", operation.warnings, file=sys.stderr)
print(f"Instance {instance_name} created.")
return instance


# [END compute_instances_create]


Expand Down Expand Up @@ -196,6 +204,8 @@ def delete_instance(project_id: str, zone: str, machine_name: str) -> None:
print("Warning during deletion:", operation.warnings, file=sys.stderr)
print(f"Instance {machine_name} deleted.")
return


# [END compute_instances_delete]


Expand Down Expand Up @@ -227,6 +237,8 @@ def wait_for_operation(
else:
client = compute_v1.GlobalOperationsClient()
return client.wait(**kwargs)


# [END compute_instances_operation_check]


Expand Down
42 changes: 25 additions & 17 deletions samples/snippets/sample_default_values.py
Expand Up @@ -22,14 +22,16 @@
# [START compute_usage_report_get]
# [START compute_usage_report_disable]
from google.cloud import compute_v1

# [END compute_usage_report_disable]
# [END compute_usage_report_get]
# [END compute_usage_report_set]


# [START compute_usage_report_set]
def set_usage_export_bucket(project_id: str, bucket_name: str,
report_name_prefix: str = "") -> None:
def set_usage_export_bucket(
project_id: str, bucket_name: str, report_name_prefix: str = ""
) -> None:
"""
Set Compute Engine usage export bucket for the Cloud project.
This sample presents how to interpret the default value for the
Expand All @@ -43,27 +45,28 @@ def set_usage_export_bucket(project_id: str, bucket_name: str,
to showcase default values behaviour.
"""
usage_export_location = compute_v1.UsageExportLocation(
bucket_name=bucket_name,
report_name_prefix=report_name_prefix
bucket_name=bucket_name, report_name_prefix=report_name_prefix
)

if not report_name_prefix:
# Sending an empty value for report_name_prefix results in the
# next usage report being generated with the default prefix value
# "usage_gce". (ref: https://cloud.google.com/compute/docs/reference/rest/v1/projects/setUsageExportBucket)
print("Setting report_name_prefix to empty value causes the report "
"to have the default prefix of `usage_gce`.")
print(
"Setting report_name_prefix to empty value causes the report "
"to have the default prefix of `usage_gce`."
)

projects_client = compute_v1.ProjectsClient()
operation = projects_client.set_usage_export_bucket(
project=project_id, usage_export_location_resource=usage_export_location)
project=project_id, usage_export_location_resource=usage_export_location
)

op_client = compute_v1.GlobalOperationsClient()

while operation.status != compute_v1.Operation.Status.DONE:
operation = op_client.wait(
operation=operation.name, project=project_id
)
operation = op_client.wait(operation=operation.name, project=project_id)


# [END compute_usage_report_set]

Expand Down Expand Up @@ -94,10 +97,14 @@ def get_usage_export_bucket(project_id: str) -> compute_v1.UsageExportLocation:
# Although the server sent the empty string value, the next usage report
# generated with these settings still has the default prefix value
# "usage_gce". (see https://cloud.google.com/compute/docs/reference/rest/v1/projects/get)
print('Report name prefix not set, replacing with default value of '
'`usage_gce`.')
uel.report_name_prefix = 'usage_gce'
print(
"Report name prefix not set, replacing with default value of "
"`usage_gce`."
)
uel.report_name_prefix = "usage_gce"
return uel


# [END compute_usage_report_get]
# [END compute_instances_verify_default_value]

Expand All @@ -115,12 +122,13 @@ def disable_usage_export(project_id: str) -> None:
# Setting `usage_export_location_resource` to an
# empty object will disable the usage report generation.
operation = projects_client.set_usage_export_bucket(
project=project_id, usage_export_location_resource={})
project=project_id, usage_export_location_resource={}
)

op_client = compute_v1.GlobalOperationsClient()

while operation.status != compute_v1.Operation.Status.DONE:
operation = op_client.wait(
operation=operation.name, project=project_id
)
operation = op_client.wait(operation=operation.name, project=project_id)


# [END compute_usage_report_disable]
13 changes: 11 additions & 2 deletions samples/snippets/sample_firewall.py
Expand Up @@ -20,6 +20,7 @@
# [START compute_firewall_patch]
# [START compute_firewall_delete]
import google.cloud.compute_v1 as compute_v1

# [END compute_firewall_delete]
# [END compute_firewall_patch]
# [END compute_firewall_create]
Expand All @@ -45,6 +46,8 @@ def list_firewall_rules(project_id: str) -> Iterable:
print(f" - {firewall.name}: {firewall.description}")

return firewalls_list


# [END compute_firewall_list]


Expand All @@ -70,7 +73,7 @@ def create_firewall_rule(
"""
firewall_rule = compute_v1.Firewall()
firewall_rule.name = firewall_rule_name
firewall_rule.direction = compute_v1.Firewall.Direction.INGRESS
firewall_rule.direction = "INGRESS"

allowed_ports = compute_v1.Allowed()
allowed_ports.I_p_protocol = "tcp"
Expand All @@ -81,7 +84,7 @@ def create_firewall_rule(
firewall_rule.network = network
firewall_rule.description = "Allowing TCP traffic on port 80 and 443 from Internet."

firewall_rule.target_tags = ['web']
firewall_rule.target_tags = ["web"]

# Note that the default value of priority for the firewall API is 1000.
# If you check the value of `firewall_rule.priority` at this point it
Expand All @@ -98,6 +101,8 @@ def create_firewall_rule(
op_client.wait(project=project_id, operation=op.name)

return


# [END compute_firewall_create]


Expand All @@ -124,6 +129,8 @@ def patch_firewall_priority(project_id: str, firewall_rule_name: str, priority:
operation_client = compute_v1.GlobalOperationsClient()
operation_client.wait(project=project_id, operation=operation.name)
return


# [END compute_firewall_patch]


Expand All @@ -142,6 +149,8 @@ def delete_firewall_rule(project_id: str, firewall_rule_name: str):
operation_client = compute_v1.GlobalOperationsClient()
operation_client.wait(project=project_id, operation=operation.name)
return


# [END compute_firewall_delete]


Expand Down
3 changes: 2 additions & 1 deletion samples/snippets/sample_instance_from_template.py
Expand Up @@ -15,6 +15,7 @@
# [START compute_instances_create_from_template]
# [START compute_instances_create_from_template_with_overrides]
from google.cloud import compute_v1

# [END compute_instances_create_from_template_with_overrides]


Expand Down Expand Up @@ -116,7 +117,7 @@ def create_instance_from_template_with_overrides(
new_disk.initialize_params.source_image = new_disk_source_image
new_disk.auto_delete = True
new_disk.boot = False
new_disk.type_ = compute_v1.AttachedDisk.Type.PERSISTENT
new_disk.type_ = "PERSISTENT"

instance.disks.append(new_disk)

Expand Down
25 changes: 17 additions & 8 deletions samples/snippets/sample_pagination.py
Expand Up @@ -17,6 +17,7 @@
# [START compute_images_list_page]
# [START compute_images_list]
import google.cloud.compute_v1 as compute_v1

# [END compute_images_list]
# [END compute_images_list_page]

Expand All @@ -34,14 +35,17 @@ def print_images_list(project: str) -> None:
"""
images_client = compute_v1.ImagesClient()
# Listing only non-deprecated images to reduce the size of the reply.
images_list_request = compute_v1.ListImagesRequest(project=project, max_results=100,
filter="deprecated.state != DEPRECATED")
images_list_request = compute_v1.ListImagesRequest(
project=project, max_results=100, filter="deprecated.state != DEPRECATED"
)

# Although the `max_results` parameter is specified in the request, the iterable returned
# by the `list()` method hides the pagination mechanic. The library makes multiple
# requests to the API for you, so you can simply iterate over all the images.
for img in images_client.list(request=images_list_request):
print(f" - {img.name}")


# [END compute_images_list]


Expand All @@ -60,21 +64,26 @@ def print_images_list_by_page(project: str, page_size: int = 10) -> None:
"""
images_client = compute_v1.ImagesClient()
# Listing only non-deprecated images to reduce the size of the reply.
images_list_request = compute_v1.ListImagesRequest(project=project, max_results=page_size,
filter="deprecated.state != DEPRECATED")
images_list_request = compute_v1.ListImagesRequest(
project=project, max_results=page_size, filter="deprecated.state != DEPRECATED"
)

# Use the `pages` attribute of returned iterable to have more granular control of
# iteration over paginated results from the API. Each time you want to access the
# next page, the library retrieves that page from the API.
for page_num, page in enumerate(images_client.list(request=images_list_request).pages, start=1):
for page_num, page in enumerate(
images_client.list(request=images_list_request).pages, start=1
):
print(f"Page {page_num}: ")
for img in page.items:
print(f" - {img.name}")


# [END compute_images_list_page]


if __name__ == '__main__':
if __name__ == "__main__":
print("=================== Flat list of images ===================")
print_images_list('windows-sql-cloud')
print_images_list("windows-sql-cloud")
print("================= Paginated list of images ================")
print_images_list_by_page('windows-sql-cloud', 5)
print_images_list_by_page("windows-sql-cloud", 5)

0 comments on commit 6cfa01e

Please sign in to comment.