diff --git a/docs/index.rst b/docs/index.rst index 137f86d..df112c4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,6 +2,17 @@ .. include:: multiprocessing.rst +This package includes clients for multiple versions of the Notebooks +API. By default, you will get ``v1``, the latest version. + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + notebooks_v1/services + notebooks_v1/types + API Reference ------------- .. toctree:: diff --git a/docs/notebooks_v1/managed_notebook_service.rst b/docs/notebooks_v1/managed_notebook_service.rst new file mode 100644 index 0000000..73e8748 --- /dev/null +++ b/docs/notebooks_v1/managed_notebook_service.rst @@ -0,0 +1,10 @@ +ManagedNotebookService +---------------------------------------- + +.. automodule:: google.cloud.notebooks_v1.services.managed_notebook_service + :members: + :inherited-members: + +.. automodule:: google.cloud.notebooks_v1.services.managed_notebook_service.pagers + :members: + :inherited-members: diff --git a/docs/notebooks_v1/notebook_service.rst b/docs/notebooks_v1/notebook_service.rst new file mode 100644 index 0000000..f6d523e --- /dev/null +++ b/docs/notebooks_v1/notebook_service.rst @@ -0,0 +1,10 @@ +NotebookService +--------------------------------- + +.. automodule:: google.cloud.notebooks_v1.services.notebook_service + :members: + :inherited-members: + +.. automodule:: google.cloud.notebooks_v1.services.notebook_service.pagers + :members: + :inherited-members: diff --git a/docs/notebooks_v1/services.rst b/docs/notebooks_v1/services.rst new file mode 100644 index 0000000..f0543ab --- /dev/null +++ b/docs/notebooks_v1/services.rst @@ -0,0 +1,7 @@ +Services for Google Cloud Notebooks v1 API +========================================== +.. 
toctree:: + :maxdepth: 2 + + managed_notebook_service + notebook_service diff --git a/docs/notebooks_v1/types.rst b/docs/notebooks_v1/types.rst new file mode 100644 index 0000000..657fd1f --- /dev/null +++ b/docs/notebooks_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Notebooks v1 API +======================================= + +.. automodule:: google.cloud.notebooks_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/google/cloud/notebooks/__init__.py b/google/cloud/notebooks/__init__.py index 2d3bf90..7d5ad8f 100644 --- a/google/cloud/notebooks/__init__.py +++ b/google/cloud/notebooks/__init__.py @@ -14,69 +14,163 @@ # limitations under the License. # -from google.cloud.notebooks_v1beta1.services.notebook_service.client import ( +from google.cloud.notebooks_v1.services.managed_notebook_service.client import ( + ManagedNotebookServiceClient, +) +from google.cloud.notebooks_v1.services.managed_notebook_service.async_client import ( + ManagedNotebookServiceAsyncClient, +) +from google.cloud.notebooks_v1.services.notebook_service.client import ( NotebookServiceClient, ) -from google.cloud.notebooks_v1beta1.services.notebook_service.async_client import ( +from google.cloud.notebooks_v1.services.notebook_service.async_client import ( NotebookServiceAsyncClient, ) -from google.cloud.notebooks_v1beta1.types.environment import ContainerImage -from google.cloud.notebooks_v1beta1.types.environment import Environment -from google.cloud.notebooks_v1beta1.types.environment import VmImage -from google.cloud.notebooks_v1beta1.types.instance import Instance -from google.cloud.notebooks_v1beta1.types.service import CreateEnvironmentRequest -from google.cloud.notebooks_v1beta1.types.service import CreateInstanceRequest -from google.cloud.notebooks_v1beta1.types.service import DeleteEnvironmentRequest -from google.cloud.notebooks_v1beta1.types.service import DeleteInstanceRequest -from google.cloud.notebooks_v1beta1.types.service import 
GetEnvironmentRequest -from google.cloud.notebooks_v1beta1.types.service import GetInstanceRequest -from google.cloud.notebooks_v1beta1.types.service import IsInstanceUpgradeableRequest -from google.cloud.notebooks_v1beta1.types.service import IsInstanceUpgradeableResponse -from google.cloud.notebooks_v1beta1.types.service import ListEnvironmentsRequest -from google.cloud.notebooks_v1beta1.types.service import ListEnvironmentsResponse -from google.cloud.notebooks_v1beta1.types.service import ListInstancesRequest -from google.cloud.notebooks_v1beta1.types.service import ListInstancesResponse -from google.cloud.notebooks_v1beta1.types.service import OperationMetadata -from google.cloud.notebooks_v1beta1.types.service import RegisterInstanceRequest -from google.cloud.notebooks_v1beta1.types.service import ReportInstanceInfoRequest -from google.cloud.notebooks_v1beta1.types.service import ResetInstanceRequest -from google.cloud.notebooks_v1beta1.types.service import SetInstanceAcceleratorRequest -from google.cloud.notebooks_v1beta1.types.service import SetInstanceLabelsRequest -from google.cloud.notebooks_v1beta1.types.service import SetInstanceMachineTypeRequest -from google.cloud.notebooks_v1beta1.types.service import StartInstanceRequest -from google.cloud.notebooks_v1beta1.types.service import StopInstanceRequest -from google.cloud.notebooks_v1beta1.types.service import UpgradeInstanceInternalRequest -from google.cloud.notebooks_v1beta1.types.service import UpgradeInstanceRequest +from google.cloud.notebooks_v1.types.environment import ContainerImage +from google.cloud.notebooks_v1.types.environment import Environment +from google.cloud.notebooks_v1.types.environment import VmImage +from google.cloud.notebooks_v1.types.event import Event +from google.cloud.notebooks_v1.types.execution import Execution +from google.cloud.notebooks_v1.types.execution import ExecutionTemplate +from google.cloud.notebooks_v1.types.instance import Instance +from 
google.cloud.notebooks_v1.types.instance import ReservationAffinity +from google.cloud.notebooks_v1.types.instance_config import InstanceConfig +from google.cloud.notebooks_v1.types.managed_service import CreateRuntimeRequest +from google.cloud.notebooks_v1.types.managed_service import DeleteRuntimeRequest +from google.cloud.notebooks_v1.types.managed_service import GetRuntimeRequest +from google.cloud.notebooks_v1.types.managed_service import ListRuntimesRequest +from google.cloud.notebooks_v1.types.managed_service import ListRuntimesResponse +from google.cloud.notebooks_v1.types.managed_service import ReportRuntimeEventRequest +from google.cloud.notebooks_v1.types.managed_service import ResetRuntimeRequest +from google.cloud.notebooks_v1.types.managed_service import StartRuntimeRequest +from google.cloud.notebooks_v1.types.managed_service import StopRuntimeRequest +from google.cloud.notebooks_v1.types.managed_service import SwitchRuntimeRequest +from google.cloud.notebooks_v1.types.runtime import EncryptionConfig +from google.cloud.notebooks_v1.types.runtime import LocalDisk +from google.cloud.notebooks_v1.types.runtime import LocalDiskInitializeParams +from google.cloud.notebooks_v1.types.runtime import Runtime +from google.cloud.notebooks_v1.types.runtime import RuntimeAcceleratorConfig +from google.cloud.notebooks_v1.types.runtime import RuntimeAccessConfig +from google.cloud.notebooks_v1.types.runtime import RuntimeMetrics +from google.cloud.notebooks_v1.types.runtime import RuntimeShieldedInstanceConfig +from google.cloud.notebooks_v1.types.runtime import RuntimeSoftwareConfig +from google.cloud.notebooks_v1.types.runtime import VirtualMachine +from google.cloud.notebooks_v1.types.runtime import VirtualMachineConfig +from google.cloud.notebooks_v1.types.schedule import Schedule +from google.cloud.notebooks_v1.types.service import CreateEnvironmentRequest +from google.cloud.notebooks_v1.types.service import CreateExecutionRequest +from 
google.cloud.notebooks_v1.types.service import CreateInstanceRequest +from google.cloud.notebooks_v1.types.service import CreateScheduleRequest +from google.cloud.notebooks_v1.types.service import DeleteEnvironmentRequest +from google.cloud.notebooks_v1.types.service import DeleteExecutionRequest +from google.cloud.notebooks_v1.types.service import DeleteInstanceRequest +from google.cloud.notebooks_v1.types.service import DeleteScheduleRequest +from google.cloud.notebooks_v1.types.service import GetEnvironmentRequest +from google.cloud.notebooks_v1.types.service import GetExecutionRequest +from google.cloud.notebooks_v1.types.service import GetInstanceHealthRequest +from google.cloud.notebooks_v1.types.service import GetInstanceHealthResponse +from google.cloud.notebooks_v1.types.service import GetInstanceRequest +from google.cloud.notebooks_v1.types.service import GetScheduleRequest +from google.cloud.notebooks_v1.types.service import IsInstanceUpgradeableRequest +from google.cloud.notebooks_v1.types.service import IsInstanceUpgradeableResponse +from google.cloud.notebooks_v1.types.service import ListEnvironmentsRequest +from google.cloud.notebooks_v1.types.service import ListEnvironmentsResponse +from google.cloud.notebooks_v1.types.service import ListExecutionsRequest +from google.cloud.notebooks_v1.types.service import ListExecutionsResponse +from google.cloud.notebooks_v1.types.service import ListInstancesRequest +from google.cloud.notebooks_v1.types.service import ListInstancesResponse +from google.cloud.notebooks_v1.types.service import ListSchedulesRequest +from google.cloud.notebooks_v1.types.service import ListSchedulesResponse +from google.cloud.notebooks_v1.types.service import OperationMetadata +from google.cloud.notebooks_v1.types.service import RegisterInstanceRequest +from google.cloud.notebooks_v1.types.service import ReportInstanceInfoRequest +from google.cloud.notebooks_v1.types.service import ResetInstanceRequest +from 
google.cloud.notebooks_v1.types.service import RollbackInstanceRequest +from google.cloud.notebooks_v1.types.service import SetInstanceAcceleratorRequest +from google.cloud.notebooks_v1.types.service import SetInstanceLabelsRequest +from google.cloud.notebooks_v1.types.service import SetInstanceMachineTypeRequest +from google.cloud.notebooks_v1.types.service import StartInstanceRequest +from google.cloud.notebooks_v1.types.service import StopInstanceRequest +from google.cloud.notebooks_v1.types.service import TriggerScheduleRequest +from google.cloud.notebooks_v1.types.service import UpdateInstanceConfigRequest +from google.cloud.notebooks_v1.types.service import UpdateShieldedInstanceConfigRequest +from google.cloud.notebooks_v1.types.service import UpgradeInstanceInternalRequest +from google.cloud.notebooks_v1.types.service import UpgradeInstanceRequest __all__ = ( + "ManagedNotebookServiceClient", + "ManagedNotebookServiceAsyncClient", "NotebookServiceClient", "NotebookServiceAsyncClient", "ContainerImage", "Environment", "VmImage", + "Event", + "Execution", + "ExecutionTemplate", "Instance", + "ReservationAffinity", + "InstanceConfig", + "CreateRuntimeRequest", + "DeleteRuntimeRequest", + "GetRuntimeRequest", + "ListRuntimesRequest", + "ListRuntimesResponse", + "ReportRuntimeEventRequest", + "ResetRuntimeRequest", + "StartRuntimeRequest", + "StopRuntimeRequest", + "SwitchRuntimeRequest", + "EncryptionConfig", + "LocalDisk", + "LocalDiskInitializeParams", + "Runtime", + "RuntimeAcceleratorConfig", + "RuntimeAccessConfig", + "RuntimeMetrics", + "RuntimeShieldedInstanceConfig", + "RuntimeSoftwareConfig", + "VirtualMachine", + "VirtualMachineConfig", + "Schedule", "CreateEnvironmentRequest", + "CreateExecutionRequest", "CreateInstanceRequest", + "CreateScheduleRequest", "DeleteEnvironmentRequest", + "DeleteExecutionRequest", "DeleteInstanceRequest", + "DeleteScheduleRequest", "GetEnvironmentRequest", + "GetExecutionRequest", + "GetInstanceHealthRequest", + 
"GetInstanceHealthResponse", "GetInstanceRequest", + "GetScheduleRequest", "IsInstanceUpgradeableRequest", "IsInstanceUpgradeableResponse", "ListEnvironmentsRequest", "ListEnvironmentsResponse", + "ListExecutionsRequest", + "ListExecutionsResponse", "ListInstancesRequest", "ListInstancesResponse", + "ListSchedulesRequest", + "ListSchedulesResponse", "OperationMetadata", "RegisterInstanceRequest", "ReportInstanceInfoRequest", "ResetInstanceRequest", + "RollbackInstanceRequest", "SetInstanceAcceleratorRequest", "SetInstanceLabelsRequest", "SetInstanceMachineTypeRequest", "StartInstanceRequest", "StopInstanceRequest", + "TriggerScheduleRequest", + "UpdateInstanceConfigRequest", + "UpdateShieldedInstanceConfigRequest", "UpgradeInstanceInternalRequest", "UpgradeInstanceRequest", ) diff --git a/google/cloud/notebooks_v1/__init__.py b/google/cloud/notebooks_v1/__init__.py new file mode 100644 index 0000000..08dfbcf --- /dev/null +++ b/google/cloud/notebooks_v1/__init__.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.managed_notebook_service import ManagedNotebookServiceClient +from .services.managed_notebook_service import ManagedNotebookServiceAsyncClient +from .services.notebook_service import NotebookServiceClient +from .services.notebook_service import NotebookServiceAsyncClient + +from .types.environment import ContainerImage +from .types.environment import Environment +from .types.environment import VmImage +from .types.event import Event +from .types.execution import Execution +from .types.execution import ExecutionTemplate +from .types.instance import Instance +from .types.instance import ReservationAffinity +from .types.instance_config import InstanceConfig +from .types.managed_service import CreateRuntimeRequest +from .types.managed_service import DeleteRuntimeRequest +from .types.managed_service import GetRuntimeRequest +from .types.managed_service import ListRuntimesRequest +from .types.managed_service import ListRuntimesResponse +from .types.managed_service import ReportRuntimeEventRequest +from .types.managed_service import ResetRuntimeRequest +from .types.managed_service import StartRuntimeRequest +from .types.managed_service import StopRuntimeRequest +from .types.managed_service import SwitchRuntimeRequest +from .types.runtime import EncryptionConfig +from .types.runtime import LocalDisk +from .types.runtime import LocalDiskInitializeParams +from .types.runtime import Runtime +from .types.runtime import RuntimeAcceleratorConfig +from .types.runtime import RuntimeAccessConfig +from .types.runtime import RuntimeMetrics +from .types.runtime import RuntimeShieldedInstanceConfig +from .types.runtime import RuntimeSoftwareConfig +from .types.runtime import VirtualMachine +from .types.runtime import VirtualMachineConfig +from .types.schedule import Schedule +from .types.service import CreateEnvironmentRequest +from .types.service import CreateExecutionRequest +from .types.service import CreateInstanceRequest +from .types.service import 
CreateScheduleRequest +from .types.service import DeleteEnvironmentRequest +from .types.service import DeleteExecutionRequest +from .types.service import DeleteInstanceRequest +from .types.service import DeleteScheduleRequest +from .types.service import GetEnvironmentRequest +from .types.service import GetExecutionRequest +from .types.service import GetInstanceHealthRequest +from .types.service import GetInstanceHealthResponse +from .types.service import GetInstanceRequest +from .types.service import GetScheduleRequest +from .types.service import IsInstanceUpgradeableRequest +from .types.service import IsInstanceUpgradeableResponse +from .types.service import ListEnvironmentsRequest +from .types.service import ListEnvironmentsResponse +from .types.service import ListExecutionsRequest +from .types.service import ListExecutionsResponse +from .types.service import ListInstancesRequest +from .types.service import ListInstancesResponse +from .types.service import ListSchedulesRequest +from .types.service import ListSchedulesResponse +from .types.service import OperationMetadata +from .types.service import RegisterInstanceRequest +from .types.service import ReportInstanceInfoRequest +from .types.service import ResetInstanceRequest +from .types.service import RollbackInstanceRequest +from .types.service import SetInstanceAcceleratorRequest +from .types.service import SetInstanceLabelsRequest +from .types.service import SetInstanceMachineTypeRequest +from .types.service import StartInstanceRequest +from .types.service import StopInstanceRequest +from .types.service import TriggerScheduleRequest +from .types.service import UpdateInstanceConfigRequest +from .types.service import UpdateShieldedInstanceConfigRequest +from .types.service import UpgradeInstanceInternalRequest +from .types.service import UpgradeInstanceRequest + +__all__ = ( + "ManagedNotebookServiceAsyncClient", + "NotebookServiceAsyncClient", + "ContainerImage", + "CreateEnvironmentRequest", + 
"CreateExecutionRequest", + "CreateInstanceRequest", + "CreateRuntimeRequest", + "CreateScheduleRequest", + "DeleteEnvironmentRequest", + "DeleteExecutionRequest", + "DeleteInstanceRequest", + "DeleteRuntimeRequest", + "DeleteScheduleRequest", + "EncryptionConfig", + "Environment", + "Event", + "Execution", + "ExecutionTemplate", + "GetEnvironmentRequest", + "GetExecutionRequest", + "GetInstanceHealthRequest", + "GetInstanceHealthResponse", + "GetInstanceRequest", + "GetRuntimeRequest", + "GetScheduleRequest", + "Instance", + "InstanceConfig", + "IsInstanceUpgradeableRequest", + "IsInstanceUpgradeableResponse", + "ListEnvironmentsRequest", + "ListEnvironmentsResponse", + "ListExecutionsRequest", + "ListExecutionsResponse", + "ListInstancesRequest", + "ListInstancesResponse", + "ListRuntimesRequest", + "ListRuntimesResponse", + "ListSchedulesRequest", + "ListSchedulesResponse", + "LocalDisk", + "LocalDiskInitializeParams", + "ManagedNotebookServiceClient", + "NotebookServiceClient", + "OperationMetadata", + "RegisterInstanceRequest", + "ReportInstanceInfoRequest", + "ReportRuntimeEventRequest", + "ReservationAffinity", + "ResetInstanceRequest", + "ResetRuntimeRequest", + "RollbackInstanceRequest", + "Runtime", + "RuntimeAcceleratorConfig", + "RuntimeAccessConfig", + "RuntimeMetrics", + "RuntimeShieldedInstanceConfig", + "RuntimeSoftwareConfig", + "Schedule", + "SetInstanceAcceleratorRequest", + "SetInstanceLabelsRequest", + "SetInstanceMachineTypeRequest", + "StartInstanceRequest", + "StartRuntimeRequest", + "StopInstanceRequest", + "StopRuntimeRequest", + "SwitchRuntimeRequest", + "TriggerScheduleRequest", + "UpdateInstanceConfigRequest", + "UpdateShieldedInstanceConfigRequest", + "UpgradeInstanceInternalRequest", + "UpgradeInstanceRequest", + "VirtualMachine", + "VirtualMachineConfig", + "VmImage", +) diff --git a/google/cloud/notebooks_v1/gapic_metadata.json b/google/cloud/notebooks_v1/gapic_metadata.json new file mode 100644 index 0000000..9c93633 --- /dev/null 
+++ b/google/cloud/notebooks_v1/gapic_metadata.json @@ -0,0 +1,447 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.notebooks_v1", + "protoPackage": "google.cloud.notebooks.v1", + "schema": "1.0", + "services": { + "ManagedNotebookService": { + "clients": { + "grpc": { + "libraryClient": "ManagedNotebookServiceClient", + "rpcs": { + "CreateRuntime": { + "methods": [ + "create_runtime" + ] + }, + "DeleteRuntime": { + "methods": [ + "delete_runtime" + ] + }, + "GetRuntime": { + "methods": [ + "get_runtime" + ] + }, + "ListRuntimes": { + "methods": [ + "list_runtimes" + ] + }, + "ReportRuntimeEvent": { + "methods": [ + "report_runtime_event" + ] + }, + "ResetRuntime": { + "methods": [ + "reset_runtime" + ] + }, + "StartRuntime": { + "methods": [ + "start_runtime" + ] + }, + "StopRuntime": { + "methods": [ + "stop_runtime" + ] + }, + "SwitchRuntime": { + "methods": [ + "switch_runtime" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ManagedNotebookServiceAsyncClient", + "rpcs": { + "CreateRuntime": { + "methods": [ + "create_runtime" + ] + }, + "DeleteRuntime": { + "methods": [ + "delete_runtime" + ] + }, + "GetRuntime": { + "methods": [ + "get_runtime" + ] + }, + "ListRuntimes": { + "methods": [ + "list_runtimes" + ] + }, + "ReportRuntimeEvent": { + "methods": [ + "report_runtime_event" + ] + }, + "ResetRuntime": { + "methods": [ + "reset_runtime" + ] + }, + "StartRuntime": { + "methods": [ + "start_runtime" + ] + }, + "StopRuntime": { + "methods": [ + "stop_runtime" + ] + }, + "SwitchRuntime": { + "methods": [ + "switch_runtime" + ] + } + } + } + } + }, + "NotebookService": { + "clients": { + "grpc": { + "libraryClient": "NotebookServiceClient", + "rpcs": { + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, + "CreateExecution": { + "methods": [ + "create_execution" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" 
+ ] + }, + "CreateSchedule": { + "methods": [ + "create_schedule" + ] + }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, + "DeleteExecution": { + "methods": [ + "delete_execution" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "DeleteSchedule": { + "methods": [ + "delete_schedule" + ] + }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, + "GetExecution": { + "methods": [ + "get_execution" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "GetInstanceHealth": { + "methods": [ + "get_instance_health" + ] + }, + "GetSchedule": { + "methods": [ + "get_schedule" + ] + }, + "IsInstanceUpgradeable": { + "methods": [ + "is_instance_upgradeable" + ] + }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, + "ListExecutions": { + "methods": [ + "list_executions" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "ListSchedules": { + "methods": [ + "list_schedules" + ] + }, + "RegisterInstance": { + "methods": [ + "register_instance" + ] + }, + "ReportInstanceInfo": { + "methods": [ + "report_instance_info" + ] + }, + "ResetInstance": { + "methods": [ + "reset_instance" + ] + }, + "RollbackInstance": { + "methods": [ + "rollback_instance" + ] + }, + "SetInstanceAccelerator": { + "methods": [ + "set_instance_accelerator" + ] + }, + "SetInstanceLabels": { + "methods": [ + "set_instance_labels" + ] + }, + "SetInstanceMachineType": { + "methods": [ + "set_instance_machine_type" + ] + }, + "StartInstance": { + "methods": [ + "start_instance" + ] + }, + "StopInstance": { + "methods": [ + "stop_instance" + ] + }, + "TriggerSchedule": { + "methods": [ + "trigger_schedule" + ] + }, + "UpdateInstanceConfig": { + "methods": [ + "update_instance_config" + ] + }, + "UpdateShieldedInstanceConfig": { + "methods": [ + "update_shielded_instance_config" + ] + }, + "UpgradeInstance": { + "methods": [ + "upgrade_instance" + ] + }, + "UpgradeInstanceInternal": { 
+ "methods": [ + "upgrade_instance_internal" + ] + } + } + }, + "grpc-async": { + "libraryClient": "NotebookServiceAsyncClient", + "rpcs": { + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, + "CreateExecution": { + "methods": [ + "create_execution" + ] + }, + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateSchedule": { + "methods": [ + "create_schedule" + ] + }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, + "DeleteExecution": { + "methods": [ + "delete_execution" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "DeleteSchedule": { + "methods": [ + "delete_schedule" + ] + }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, + "GetExecution": { + "methods": [ + "get_execution" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "GetInstanceHealth": { + "methods": [ + "get_instance_health" + ] + }, + "GetSchedule": { + "methods": [ + "get_schedule" + ] + }, + "IsInstanceUpgradeable": { + "methods": [ + "is_instance_upgradeable" + ] + }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, + "ListExecutions": { + "methods": [ + "list_executions" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "ListSchedules": { + "methods": [ + "list_schedules" + ] + }, + "RegisterInstance": { + "methods": [ + "register_instance" + ] + }, + "ReportInstanceInfo": { + "methods": [ + "report_instance_info" + ] + }, + "ResetInstance": { + "methods": [ + "reset_instance" + ] + }, + "RollbackInstance": { + "methods": [ + "rollback_instance" + ] + }, + "SetInstanceAccelerator": { + "methods": [ + "set_instance_accelerator" + ] + }, + "SetInstanceLabels": { + "methods": [ + "set_instance_labels" + ] + }, + "SetInstanceMachineType": { + "methods": [ + "set_instance_machine_type" + ] + }, + "StartInstance": { + "methods": [ + "start_instance" + ] + }, + "StopInstance": { + "methods": [ + "stop_instance" + ] + }, + 
"TriggerSchedule": { + "methods": [ + "trigger_schedule" + ] + }, + "UpdateInstanceConfig": { + "methods": [ + "update_instance_config" + ] + }, + "UpdateShieldedInstanceConfig": { + "methods": [ + "update_shielded_instance_config" + ] + }, + "UpgradeInstance": { + "methods": [ + "upgrade_instance" + ] + }, + "UpgradeInstanceInternal": { + "methods": [ + "upgrade_instance_internal" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/notebooks_v1/py.typed b/google/cloud/notebooks_v1/py.typed new file mode 100644 index 0000000..05d6f21 --- /dev/null +++ b/google/cloud/notebooks_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-notebooks package uses inline types. diff --git a/google/cloud/notebooks_v1/services/__init__.py b/google/cloud/notebooks_v1/services/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/google/cloud/notebooks_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/google/cloud/notebooks_v1/services/managed_notebook_service/__init__.py b/google/cloud/notebooks_v1/services/managed_notebook_service/__init__.py new file mode 100644 index 0000000..deb5c09 --- /dev/null +++ b/google/cloud/notebooks_v1/services/managed_notebook_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ManagedNotebookServiceClient +from .async_client import ManagedNotebookServiceAsyncClient + +__all__ = ( + "ManagedNotebookServiceClient", + "ManagedNotebookServiceAsyncClient", +) diff --git a/google/cloud/notebooks_v1/services/managed_notebook_service/async_client.py b/google/cloud/notebooks_v1/services/managed_notebook_service/async_client.py new file mode 100644 index 0000000..942b7e6 --- /dev/null +++ b/google/cloud/notebooks_v1/services/managed_notebook_service/async_client.py @@ -0,0 +1,950 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.notebooks_v1.services.managed_notebook_service import pagers +from google.cloud.notebooks_v1.types import managed_service +from google.cloud.notebooks_v1.types import runtime +from google.cloud.notebooks_v1.types import runtime as gcn_runtime +from google.cloud.notebooks_v1.types import service +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ManagedNotebookServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ManagedNotebookServiceGrpcAsyncIOTransport +from .client import ManagedNotebookServiceClient + + +class ManagedNotebookServiceAsyncClient: + """API v1 service for Managed Notebooks.""" + + _client: ManagedNotebookServiceClient + + DEFAULT_ENDPOINT = ManagedNotebookServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ManagedNotebookServiceClient.DEFAULT_MTLS_ENDPOINT + + runtime_path = staticmethod(ManagedNotebookServiceClient.runtime_path) + parse_runtime_path = staticmethod(ManagedNotebookServiceClient.parse_runtime_path) + common_billing_account_path = staticmethod( + ManagedNotebookServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + 
ManagedNotebookServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ManagedNotebookServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ManagedNotebookServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ManagedNotebookServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ManagedNotebookServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ManagedNotebookServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ManagedNotebookServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + ManagedNotebookServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + ManagedNotebookServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ManagedNotebookServiceAsyncClient: The constructed client. + """ + return ManagedNotebookServiceClient.from_service_account_info.__func__(ManagedNotebookServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ManagedNotebookServiceAsyncClient: The constructed client. 
+ """ + return ManagedNotebookServiceClient.from_service_account_file.__func__(ManagedNotebookServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ManagedNotebookServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ManagedNotebookServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ManagedNotebookServiceClient).get_transport_class, + type(ManagedNotebookServiceClient), + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, ManagedNotebookServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the managed notebook service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ManagedNotebookServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). 
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ManagedNotebookServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_runtimes( + self, + request: managed_service.ListRuntimesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRuntimesAsyncPager: + r"""Lists Runtimes in a given project and location. + + Args: + request (:class:`google.cloud.notebooks_v1.types.ListRuntimesRequest`): + The request object. Request for listing Managed Notebook + Runtimes. + parent (:class:`str`): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.services.managed_notebook_service.pagers.ListRuntimesAsyncPager: + Response for listing Managed Notebook + Runtimes. + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = managed_service.ListRuntimesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_runtimes, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListRuntimesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_runtime( + self, + request: managed_service.GetRuntimeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> runtime.Runtime: + r"""Gets details of a single Runtime. The location must + be a regional endpoint rather than zonal. + + Args: + request (:class:`google.cloud.notebooks_v1.types.GetRuntimeRequest`): + The request object. Request for getting a Managed + Notebook Runtime. + name (:class:`str`): + Required. 
Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.types.Runtime: + The definition of a Runtime for a + managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = managed_service.GetRuntimeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_runtime, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def create_runtime( + self, + request: managed_service.CreateRuntimeRequest = None, + *, + parent: str = None, + runtime_id: str = None, + runtime: gcn_runtime.Runtime = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Runtime in a given project and + location. + + Args: + request (:class:`google.cloud.notebooks_v1.types.CreateRuntimeRequest`): + The request object. Request for creating a Managed + Notebook Runtime. + parent (:class:`str`): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + runtime_id (:class:`str`): + Required. User-defined unique ID of + this Runtime. + + This corresponds to the ``runtime_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + runtime (:class:`google.cloud.notebooks_v1.types.Runtime`): + Required. The Runtime to be created. + This corresponds to the ``runtime`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Runtime` The + definition of a Runtime for a managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, runtime_id, runtime]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = managed_service.CreateRuntimeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if runtime_id is not None: + request.runtime_id = runtime_id + if runtime is not None: + request.runtime = runtime + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_runtime, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_runtime.Runtime, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_runtime( + self, + request: managed_service.DeleteRuntimeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Runtime. + + Args: + request (:class:`google.cloud.notebooks_v1.types.DeleteRuntimeRequest`): + The request object. Request for deleting a Managed + Notebook Runtime. + name (:class:`str`): + Required. 
Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = managed_service.DeleteRuntimeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_runtime, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def start_runtime( + self, + request: managed_service.StartRuntimeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts a Managed Notebook Runtime. + Perform "Start" on GPU instances; "Resume" on CPU + instances See: + https://cloud.google.com/compute/docs/instances/stop- + start-instance + https://cloud.google.com/compute/docs/instances/suspend- + resume-instance + + Args: + request (:class:`google.cloud.notebooks_v1.types.StartRuntimeRequest`): + The request object. Request for starting a Managed + Notebook Runtime. + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Runtime` The + definition of a Runtime for a managed notebook instance. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = managed_service.StartRuntimeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.start_runtime, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + runtime.Runtime, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def stop_runtime( + self, + request: managed_service.StopRuntimeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Stops a Managed Notebook Runtime. + Perform "Stop" on GPU instances; "Suspend" on CPU + instances See: + https://cloud.google.com/compute/docs/instances/stop- + start-instance + https://cloud.google.com/compute/docs/instances/suspend- + resume-instance + + Args: + request (:class:`google.cloud.notebooks_v1.types.StopRuntimeRequest`): + The request object. 
Request for stopping a Managed + Notebook Runtime. + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Runtime` The + definition of a Runtime for a managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = managed_service.StopRuntimeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.stop_runtime, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + runtime.Runtime, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def switch_runtime( + self, + request: managed_service.SwitchRuntimeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Switch a Managed Notebook Runtime. + + Args: + request (:class:`google.cloud.notebooks_v1.types.SwitchRuntimeRequest`): + The request object. Request for switching a Managed + Notebook Runtime. + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Runtime` The + definition of a Runtime for a managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = managed_service.SwitchRuntimeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.switch_runtime, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + runtime.Runtime, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def reset_runtime( + self, + request: managed_service.ResetRuntimeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Resets a Managed Notebook Runtime. + + Args: + request (:class:`google.cloud.notebooks_v1.types.ResetRuntimeRequest`): + The request object. Request for reseting a Managed + Notebook Runtime. + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Runtime` The + definition of a Runtime for a managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = managed_service.ResetRuntimeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reset_runtime, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + runtime.Runtime, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def report_runtime_event( + self, + request: managed_service.ReportRuntimeEventRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Report and process a runtime event. + + Args: + request (:class:`google.cloud.notebooks_v1.types.ReportRuntimeEventRequest`): + The request object. Request for reporting a Managed + Notebook Event. + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Runtime` The + definition of a Runtime for a managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = managed_service.ReportRuntimeEventRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.report_runtime_event, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + runtime.Runtime, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-notebooks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("ManagedNotebookServiceAsyncClient",) diff --git a/google/cloud/notebooks_v1/services/managed_notebook_service/client.py b/google/cloud/notebooks_v1/services/managed_notebook_service/client.py new file mode 100644 index 0000000..ed78fef --- /dev/null +++ b/google/cloud/notebooks_v1/services/managed_notebook_service/client.py @@ -0,0 +1,1138 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.notebooks_v1.services.managed_notebook_service import pagers +from google.cloud.notebooks_v1.types import managed_service +from google.cloud.notebooks_v1.types import runtime +from google.cloud.notebooks_v1.types import runtime as gcn_runtime +from google.cloud.notebooks_v1.types import service +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ManagedNotebookServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import ManagedNotebookServiceGrpcTransport +from .transports.grpc_asyncio import ManagedNotebookServiceGrpcAsyncIOTransport + + +class ManagedNotebookServiceClientMeta(type): + """Metaclass for the ManagedNotebookService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ManagedNotebookServiceTransport]] + _transport_registry["grpc"] = ManagedNotebookServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ManagedNotebookServiceGrpcAsyncIOTransport + + def get_transport_class( + cls, label: str = None, + ) -> Type[ManagedNotebookServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ManagedNotebookServiceClient(metaclass=ManagedNotebookServiceClientMeta): + """API v1 service for Managed Notebooks.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "notebooks.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ManagedNotebookServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ManagedNotebookServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ManagedNotebookServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ManagedNotebookServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def runtime_path(project: str, location: str, runtime: str,) -> str: + """Returns a fully-qualified runtime string.""" + return "projects/{project}/locations/{location}/runtimes/{runtime}".format( + project=project, location=location, runtime=runtime, + ) + + @staticmethod + def parse_runtime_path(path: str) -> Dict[str, str]: + """Parses a runtime path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/runtimes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Returns a fully-qualified project string.""" + return 
"projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ManagedNotebookServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the managed notebook service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ManagedNotebookServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ManagedNotebookServiceTransport): + # transport is a ManagedNotebookServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def list_runtimes( + self, + request: managed_service.ListRuntimesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRuntimesPager: + r"""Lists Runtimes in a given project and location. + + Args: + request (google.cloud.notebooks_v1.types.ListRuntimesRequest): + The request object. Request for listing Managed Notebook + Runtimes. + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.services.managed_notebook_service.pagers.ListRuntimesPager: + Response for listing Managed Notebook + Runtimes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a managed_service.ListRuntimesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, managed_service.ListRuntimesRequest): + request = managed_service.ListRuntimesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_runtimes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRuntimesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_runtime( + self, + request: managed_service.GetRuntimeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> runtime.Runtime: + r"""Gets details of a single Runtime. The location must + be a regional endpoint rather than zonal. + + Args: + request (google.cloud.notebooks_v1.types.GetRuntimeRequest): + The request object. Request for getting a Managed + Notebook Runtime. + name (str): + Required. 
Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.types.Runtime: + The definition of a Runtime for a + managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a managed_service.GetRuntimeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, managed_service.GetRuntimeRequest): + request = managed_service.GetRuntimeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_runtime] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def create_runtime( + self, + request: managed_service.CreateRuntimeRequest = None, + *, + parent: str = None, + runtime_id: str = None, + runtime: gcn_runtime.Runtime = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Runtime in a given project and + location. + + Args: + request (google.cloud.notebooks_v1.types.CreateRuntimeRequest): + The request object. Request for creating a Managed + Notebook Runtime. + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + runtime_id (str): + Required. User-defined unique ID of + this Runtime. + + This corresponds to the ``runtime_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + runtime (google.cloud.notebooks_v1.types.Runtime): + Required. The Runtime to be created. + This corresponds to the ``runtime`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Runtime` The + definition of a Runtime for a managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, runtime_id, runtime]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a managed_service.CreateRuntimeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, managed_service.CreateRuntimeRequest): + request = managed_service.CreateRuntimeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if runtime_id is not None: + request.runtime_id = runtime_id + if runtime is not None: + request.runtime = runtime + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_runtime] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_runtime.Runtime, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_runtime( + self, + request: managed_service.DeleteRuntimeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Runtime. + + Args: + request (google.cloud.notebooks_v1.types.DeleteRuntimeRequest): + The request object. 
Request for deleting a Managed + Notebook Runtime. + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a managed_service.DeleteRuntimeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, managed_service.DeleteRuntimeRequest): + request = managed_service.DeleteRuntimeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_runtime] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def start_runtime( + self, + request: managed_service.StartRuntimeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Starts a Managed Notebook Runtime. + Perform "Start" on GPU instances; "Resume" on CPU + instances See: + https://cloud.google.com/compute/docs/instances/stop- + start-instance + https://cloud.google.com/compute/docs/instances/suspend- + resume-instance + + Args: + request (google.cloud.notebooks_v1.types.StartRuntimeRequest): + The request object. Request for starting a Managed + Notebook Runtime. + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Runtime` The + definition of a Runtime for a managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a managed_service.StartRuntimeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, managed_service.StartRuntimeRequest): + request = managed_service.StartRuntimeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_runtime] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + runtime.Runtime, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def stop_runtime( + self, + request: managed_service.StopRuntimeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Stops a Managed Notebook Runtime. + Perform "Stop" on GPU instances; "Suspend" on CPU + instances See: + https://cloud.google.com/compute/docs/instances/stop- + start-instance + https://cloud.google.com/compute/docs/instances/suspend- + resume-instance + + Args: + request (google.cloud.notebooks_v1.types.StopRuntimeRequest): + The request object. Request for stopping a Managed + Notebook Runtime. + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Runtime` The + definition of a Runtime for a managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a managed_service.StopRuntimeRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, managed_service.StopRuntimeRequest): + request = managed_service.StopRuntimeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_runtime] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + runtime.Runtime, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def switch_runtime( + self, + request: managed_service.SwitchRuntimeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Switch a Managed Notebook Runtime. + + Args: + request (google.cloud.notebooks_v1.types.SwitchRuntimeRequest): + The request object. Request for switching a Managed + Notebook Runtime. + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Runtime` The + definition of a Runtime for a managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a managed_service.SwitchRuntimeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, managed_service.SwitchRuntimeRequest): + request = managed_service.SwitchRuntimeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.switch_runtime] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + runtime.Runtime, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def reset_runtime( + self, + request: managed_service.ResetRuntimeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Resets a Managed Notebook Runtime. + + Args: + request (google.cloud.notebooks_v1.types.ResetRuntimeRequest): + The request object. Request for reseting a Managed + Notebook Runtime. + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Runtime` The + definition of a Runtime for a managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a managed_service.ResetRuntimeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, managed_service.ResetRuntimeRequest): + request = managed_service.ResetRuntimeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reset_runtime] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + runtime.Runtime, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def report_runtime_event( + self, + request: managed_service.ReportRuntimeEventRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Report and process a runtime event. + + Args: + request (google.cloud.notebooks_v1.types.ReportRuntimeEventRequest): + The request object. Request for reporting a Managed + Notebook Event. + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Runtime` The + definition of a Runtime for a managed notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a managed_service.ReportRuntimeEventRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, managed_service.ReportRuntimeEventRequest): + request = managed_service.ReportRuntimeEventRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.report_runtime_event] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + runtime.Runtime, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-notebooks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("ManagedNotebookServiceClient",) diff --git a/google/cloud/notebooks_v1/services/managed_notebook_service/pagers.py b/google/cloud/notebooks_v1/services/managed_notebook_service/pagers.py new file mode 100644 index 0000000..5ab2230 --- /dev/null +++ b/google/cloud/notebooks_v1/services/managed_notebook_service/pagers.py @@ -0,0 +1,156 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) + +from google.cloud.notebooks_v1.types import managed_service +from google.cloud.notebooks_v1.types import runtime + + +class ListRuntimesPager: + """A pager for iterating through ``list_runtimes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.notebooks_v1.types.ListRuntimesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``runtimes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListRuntimes`` requests and continue to iterate + through the ``runtimes`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.notebooks_v1.types.ListRuntimesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., managed_service.ListRuntimesResponse], + request: managed_service.ListRuntimesRequest, + response: managed_service.ListRuntimesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.notebooks_v1.types.ListRuntimesRequest): + The initial request object. + response (google.cloud.notebooks_v1.types.ListRuntimesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = managed_service.ListRuntimesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[managed_service.ListRuntimesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[runtime.Runtime]: + for page in self.pages: + yield from page.runtimes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRuntimesAsyncPager: + """A pager for iterating through ``list_runtimes`` requests. + + This class thinly wraps an initial + :class:`google.cloud.notebooks_v1.types.ListRuntimesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``runtimes`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListRuntimes`` requests and continue to iterate + through the ``runtimes`` field on the + corresponding responses. + + All the usual :class:`google.cloud.notebooks_v1.types.ListRuntimesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[managed_service.ListRuntimesResponse]], + request: managed_service.ListRuntimesRequest, + response: managed_service.ListRuntimesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.notebooks_v1.types.ListRuntimesRequest): + The initial request object. + response (google.cloud.notebooks_v1.types.ListRuntimesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = managed_service.ListRuntimesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[managed_service.ListRuntimesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[runtime.Runtime]: + async def async_generator(): + async for page in self.pages: + for response in page.runtimes: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/notebooks_v1/services/managed_notebook_service/transports/__init__.py b/google/cloud/notebooks_v1/services/managed_notebook_service/transports/__init__.py new file mode 100644 index 0000000..948e4ee --- /dev/null +++ b/google/cloud/notebooks_v1/services/managed_notebook_service/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ManagedNotebookServiceTransport +from .grpc import ManagedNotebookServiceGrpcTransport +from .grpc_asyncio import ManagedNotebookServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[ManagedNotebookServiceTransport]] +_transport_registry["grpc"] = ManagedNotebookServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ManagedNotebookServiceGrpcAsyncIOTransport + +__all__ = ( + "ManagedNotebookServiceTransport", + "ManagedNotebookServiceGrpcTransport", + "ManagedNotebookServiceGrpcAsyncIOTransport", +) diff --git a/google/cloud/notebooks_v1/services/managed_notebook_service/transports/base.py b/google/cloud/notebooks_v1/services/managed_notebook_service/transports/base.py new file mode 100644 index 0000000..9589819 --- /dev/null +++ b/google/cloud/notebooks_v1/services/managed_notebook_service/transports/base.py @@ -0,0 +1,279 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.notebooks_v1.types import managed_service +from google.cloud.notebooks_v1.types import runtime +from google.longrunning import operations_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-notebooks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class ManagedNotebookServiceTransport(abc.ABC): + """Abstract transport class for ManagedNotebookService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "notebooks.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate 
the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + + # If the credentials is service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.list_runtimes: gapic_v1.method.wrap_method( + self.list_runtimes, default_timeout=60.0, client_info=client_info, + ), + self.get_runtime: gapic_v1.method.wrap_method( + self.get_runtime, default_timeout=60.0, client_info=client_info, + ), + self.create_runtime: gapic_v1.method.wrap_method( + self.create_runtime, default_timeout=60.0, client_info=client_info, + ), + self.delete_runtime: gapic_v1.method.wrap_method( + self.delete_runtime, default_timeout=60.0, client_info=client_info, + ), + self.start_runtime: gapic_v1.method.wrap_method( + self.start_runtime, default_timeout=60.0, client_info=client_info, + ), + self.stop_runtime: gapic_v1.method.wrap_method( + self.stop_runtime, default_timeout=60.0, client_info=client_info, + ), + self.switch_runtime: gapic_v1.method.wrap_method( + self.switch_runtime, default_timeout=60.0, client_info=client_info, + ), + self.reset_runtime: gapic_v1.method.wrap_method( + self.reset_runtime, default_timeout=None, client_info=client_info, + ), + self.report_runtime_event: gapic_v1.method.wrap_method( + self.report_runtime_event, + default_timeout=60.0, + client_info=client_info, + ), + } + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_runtimes( + self, + ) -> Callable[ + [managed_service.ListRuntimesRequest], + Union[ + managed_service.ListRuntimesResponse, + Awaitable[managed_service.ListRuntimesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_runtime( + self, + ) -> Callable[ + [managed_service.GetRuntimeRequest], + Union[runtime.Runtime, Awaitable[runtime.Runtime]], + ]: + raise NotImplementedError() + + @property + def create_runtime( + self, + ) -> Callable[ + [managed_service.CreateRuntimeRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + 
@property + def delete_runtime( + self, + ) -> Callable[ + [managed_service.DeleteRuntimeRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def start_runtime( + self, + ) -> Callable[ + [managed_service.StartRuntimeRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def stop_runtime( + self, + ) -> Callable[ + [managed_service.StopRuntimeRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def switch_runtime( + self, + ) -> Callable[ + [managed_service.SwitchRuntimeRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def reset_runtime( + self, + ) -> Callable[ + [managed_service.ResetRuntimeRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def report_runtime_event( + self, + ) -> Callable[ + [managed_service.ReportRuntimeEventRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + +__all__ = ("ManagedNotebookServiceTransport",) diff --git a/google/cloud/notebooks_v1/services/managed_notebook_service/transports/grpc.py b/google/cloud/notebooks_v1/services/managed_notebook_service/transports/grpc.py new file mode 100644 index 0000000..f23f538 --- /dev/null +++ b/google/cloud/notebooks_v1/services/managed_notebook_service/transports/grpc.py @@ -0,0 +1,499 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.notebooks_v1.types import managed_service +from google.cloud.notebooks_v1.types import runtime +from google.longrunning import operations_pb2 # type: ignore +from .base import ManagedNotebookServiceTransport, DEFAULT_CLIENT_INFO + + +class ManagedNotebookServiceGrpcTransport(ManagedNotebookServiceTransport): + """gRPC backend transport for ManagedNotebookService. + + API v1 service for Managed Notebooks. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "notebooks.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "notebooks.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_runtimes( + self, + ) -> Callable[ + [managed_service.ListRuntimesRequest], managed_service.ListRuntimesResponse + ]: + r"""Return a callable for the list runtimes method over gRPC. + + Lists Runtimes in a given project and location. 
+ + Returns: + Callable[[~.ListRuntimesRequest], + ~.ListRuntimesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_runtimes" not in self._stubs: + self._stubs["list_runtimes"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/ListRuntimes", + request_serializer=managed_service.ListRuntimesRequest.serialize, + response_deserializer=managed_service.ListRuntimesResponse.deserialize, + ) + return self._stubs["list_runtimes"] + + @property + def get_runtime( + self, + ) -> Callable[[managed_service.GetRuntimeRequest], runtime.Runtime]: + r"""Return a callable for the get runtime method over gRPC. + + Gets details of a single Runtime. The location must + be a regional endpoint rather than zonal. + + Returns: + Callable[[~.GetRuntimeRequest], + ~.Runtime]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_runtime" not in self._stubs: + self._stubs["get_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/GetRuntime", + request_serializer=managed_service.GetRuntimeRequest.serialize, + response_deserializer=runtime.Runtime.deserialize, + ) + return self._stubs["get_runtime"] + + @property + def create_runtime( + self, + ) -> Callable[[managed_service.CreateRuntimeRequest], operations_pb2.Operation]: + r"""Return a callable for the create runtime method over gRPC. + + Creates a new Runtime in a given project and + location. 
+ + Returns: + Callable[[~.CreateRuntimeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_runtime" not in self._stubs: + self._stubs["create_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/CreateRuntime", + request_serializer=managed_service.CreateRuntimeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_runtime"] + + @property + def delete_runtime( + self, + ) -> Callable[[managed_service.DeleteRuntimeRequest], operations_pb2.Operation]: + r"""Return a callable for the delete runtime method over gRPC. + + Deletes a single Runtime. + + Returns: + Callable[[~.DeleteRuntimeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_runtime" not in self._stubs: + self._stubs["delete_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/DeleteRuntime", + request_serializer=managed_service.DeleteRuntimeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_runtime"] + + @property + def start_runtime( + self, + ) -> Callable[[managed_service.StartRuntimeRequest], operations_pb2.Operation]: + r"""Return a callable for the start runtime method over gRPC. + + Starts a Managed Notebook Runtime. 
+ Perform "Start" on GPU instances; "Resume" on CPU + instances See: + https://cloud.google.com/compute/docs/instances/stop- + start-instance + https://cloud.google.com/compute/docs/instances/suspend- + resume-instance + + Returns: + Callable[[~.StartRuntimeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_runtime" not in self._stubs: + self._stubs["start_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/StartRuntime", + request_serializer=managed_service.StartRuntimeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["start_runtime"] + + @property + def stop_runtime( + self, + ) -> Callable[[managed_service.StopRuntimeRequest], operations_pb2.Operation]: + r"""Return a callable for the stop runtime method over gRPC. + + Stops a Managed Notebook Runtime. + Perform "Stop" on GPU instances; "Suspend" on CPU + instances See: + https://cloud.google.com/compute/docs/instances/stop- + start-instance + https://cloud.google.com/compute/docs/instances/suspend- + resume-instance + + Returns: + Callable[[~.StopRuntimeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "stop_runtime" not in self._stubs: + self._stubs["stop_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/StopRuntime", + request_serializer=managed_service.StopRuntimeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["stop_runtime"] + + @property + def switch_runtime( + self, + ) -> Callable[[managed_service.SwitchRuntimeRequest], operations_pb2.Operation]: + r"""Return a callable for the switch runtime method over gRPC. + + Switch a Managed Notebook Runtime. + + Returns: + Callable[[~.SwitchRuntimeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "switch_runtime" not in self._stubs: + self._stubs["switch_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/SwitchRuntime", + request_serializer=managed_service.SwitchRuntimeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["switch_runtime"] + + @property + def reset_runtime( + self, + ) -> Callable[[managed_service.ResetRuntimeRequest], operations_pb2.Operation]: + r"""Return a callable for the reset runtime method over gRPC. + + Resets a Managed Notebook Runtime. + + Returns: + Callable[[~.ResetRuntimeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "reset_runtime" not in self._stubs: + self._stubs["reset_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/ResetRuntime", + request_serializer=managed_service.ResetRuntimeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reset_runtime"] + + @property + def report_runtime_event( + self, + ) -> Callable[ + [managed_service.ReportRuntimeEventRequest], operations_pb2.Operation + ]: + r"""Return a callable for the report runtime event method over gRPC. + + Report and process a runtime event. + + Returns: + Callable[[~.ReportRuntimeEventRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "report_runtime_event" not in self._stubs: + self._stubs["report_runtime_event"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/ReportRuntimeEvent", + request_serializer=managed_service.ReportRuntimeEventRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["report_runtime_event"] + + +__all__ = ("ManagedNotebookServiceGrpcTransport",) diff --git a/google/cloud/notebooks_v1/services/managed_notebook_service/transports/grpc_asyncio.py b/google/cloud/notebooks_v1/services/managed_notebook_service/transports/grpc_asyncio.py new file mode 100644 index 0000000..546bade --- /dev/null +++ b/google/cloud/notebooks_v1/services/managed_notebook_service/transports/grpc_asyncio.py @@ -0,0 +1,517 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.notebooks_v1.types import managed_service +from google.cloud.notebooks_v1.types import runtime +from google.longrunning import operations_pb2 # type: ignore +from .base import ManagedNotebookServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import ManagedNotebookServiceGrpcTransport + + +class ManagedNotebookServiceGrpcAsyncIOTransport(ManagedNotebookServiceTransport): + """gRPC AsyncIO backend transport for ManagedNotebookService. + + API v1 service for Managed Notebooks. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "notebooks.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "notebooks.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. 
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_runtimes( + self, + ) -> Callable[ + [managed_service.ListRuntimesRequest], + Awaitable[managed_service.ListRuntimesResponse], + ]: + r"""Return a callable for the list runtimes method over gRPC. + + Lists Runtimes in a given project and location. + + Returns: + Callable[[~.ListRuntimesRequest], + Awaitable[~.ListRuntimesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_runtimes" not in self._stubs: + self._stubs["list_runtimes"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/ListRuntimes", + request_serializer=managed_service.ListRuntimesRequest.serialize, + response_deserializer=managed_service.ListRuntimesResponse.deserialize, + ) + return self._stubs["list_runtimes"] + + @property + def get_runtime( + self, + ) -> Callable[[managed_service.GetRuntimeRequest], Awaitable[runtime.Runtime]]: + r"""Return a callable for the get runtime method over gRPC. + + Gets details of a single Runtime. The location must + be a regional endpoint rather than zonal. + + Returns: + Callable[[~.GetRuntimeRequest], + Awaitable[~.Runtime]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_runtime" not in self._stubs: + self._stubs["get_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/GetRuntime", + request_serializer=managed_service.GetRuntimeRequest.serialize, + response_deserializer=runtime.Runtime.deserialize, + ) + return self._stubs["get_runtime"] + + @property + def create_runtime( + self, + ) -> Callable[ + [managed_service.CreateRuntimeRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create runtime method over gRPC. + + Creates a new Runtime in a given project and + location. + + Returns: + Callable[[~.CreateRuntimeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_runtime" not in self._stubs: + self._stubs["create_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/CreateRuntime", + request_serializer=managed_service.CreateRuntimeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_runtime"] + + @property + def delete_runtime( + self, + ) -> Callable[ + [managed_service.DeleteRuntimeRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete runtime method over gRPC. + + Deletes a single Runtime. + + Returns: + Callable[[~.DeleteRuntimeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_runtime" not in self._stubs: + self._stubs["delete_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/DeleteRuntime", + request_serializer=managed_service.DeleteRuntimeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_runtime"] + + @property + def start_runtime( + self, + ) -> Callable[ + [managed_service.StartRuntimeRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the start runtime method over gRPC. + + Starts a Managed Notebook Runtime. + Perform "Start" on GPU instances; "Resume" on CPU + instances See: + https://cloud.google.com/compute/docs/instances/stop- + start-instance + https://cloud.google.com/compute/docs/instances/suspend- + resume-instance + + Returns: + Callable[[~.StartRuntimeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_runtime" not in self._stubs: + self._stubs["start_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/StartRuntime", + request_serializer=managed_service.StartRuntimeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["start_runtime"] + + @property + def stop_runtime( + self, + ) -> Callable[ + [managed_service.StopRuntimeRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the stop runtime method over gRPC. + + Stops a Managed Notebook Runtime. 
+ Perform "Stop" on GPU instances; "Suspend" on CPU + instances See: + https://cloud.google.com/compute/docs/instances/stop- + start-instance + https://cloud.google.com/compute/docs/instances/suspend- + resume-instance + + Returns: + Callable[[~.StopRuntimeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stop_runtime" not in self._stubs: + self._stubs["stop_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/StopRuntime", + request_serializer=managed_service.StopRuntimeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["stop_runtime"] + + @property + def switch_runtime( + self, + ) -> Callable[ + [managed_service.SwitchRuntimeRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the switch runtime method over gRPC. + + Switch a Managed Notebook Runtime. + + Returns: + Callable[[~.SwitchRuntimeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "switch_runtime" not in self._stubs: + self._stubs["switch_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/SwitchRuntime", + request_serializer=managed_service.SwitchRuntimeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["switch_runtime"] + + @property + def reset_runtime( + self, + ) -> Callable[ + [managed_service.ResetRuntimeRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the reset runtime method over gRPC. + + Resets a Managed Notebook Runtime. + + Returns: + Callable[[~.ResetRuntimeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reset_runtime" not in self._stubs: + self._stubs["reset_runtime"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/ResetRuntime", + request_serializer=managed_service.ResetRuntimeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reset_runtime"] + + @property + def report_runtime_event( + self, + ) -> Callable[ + [managed_service.ReportRuntimeEventRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the report runtime event method over gRPC. + + Report and process a runtime event. + + Returns: + Callable[[~.ReportRuntimeEventRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "report_runtime_event" not in self._stubs: + self._stubs["report_runtime_event"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.ManagedNotebookService/ReportRuntimeEvent", + request_serializer=managed_service.ReportRuntimeEventRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["report_runtime_event"] + + +__all__ = ("ManagedNotebookServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/notebooks_v1/services/notebook_service/__init__.py b/google/cloud/notebooks_v1/services/notebook_service/__init__.py new file mode 100644 index 0000000..a17402b --- /dev/null +++ b/google/cloud/notebooks_v1/services/notebook_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import NotebookServiceClient +from .async_client import NotebookServiceAsyncClient + +__all__ = ( + "NotebookServiceClient", + "NotebookServiceAsyncClient", +) diff --git a/google/cloud/notebooks_v1/services/notebook_service/async_client.py b/google/cloud/notebooks_v1/services/notebook_service/async_client.py new file mode 100644 index 0000000..3eb38cc --- /dev/null +++ b/google/cloud/notebooks_v1/services/notebook_service/async_client.py @@ -0,0 +1,2542 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.notebooks_v1.services.notebook_service import pagers +from google.cloud.notebooks_v1.types import environment +from google.cloud.notebooks_v1.types import environment as gcn_environment +from google.cloud.notebooks_v1.types import execution +from google.cloud.notebooks_v1.types import execution as gcn_execution +from google.cloud.notebooks_v1.types import instance +from google.cloud.notebooks_v1.types import instance as gcn_instance +from google.cloud.notebooks_v1.types import schedule +from google.cloud.notebooks_v1.types import schedule as gcn_schedule +from google.cloud.notebooks_v1.types import service +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import NotebookServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import NotebookServiceGrpcAsyncIOTransport +from .client import NotebookServiceClient + + +class NotebookServiceAsyncClient: + """API v1 service for Cloud AI Platform Notebooks.""" + + _client: NotebookServiceClient + + DEFAULT_ENDPOINT = NotebookServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = NotebookServiceClient.DEFAULT_MTLS_ENDPOINT + + environment_path = staticmethod(NotebookServiceClient.environment_path) + parse_environment_path = 
staticmethod(NotebookServiceClient.parse_environment_path) + execution_path = staticmethod(NotebookServiceClient.execution_path) + parse_execution_path = staticmethod(NotebookServiceClient.parse_execution_path) + instance_path = staticmethod(NotebookServiceClient.instance_path) + parse_instance_path = staticmethod(NotebookServiceClient.parse_instance_path) + schedule_path = staticmethod(NotebookServiceClient.schedule_path) + parse_schedule_path = staticmethod(NotebookServiceClient.parse_schedule_path) + common_billing_account_path = staticmethod( + NotebookServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + NotebookServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(NotebookServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + NotebookServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + NotebookServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + NotebookServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(NotebookServiceClient.common_project_path) + parse_common_project_path = staticmethod( + NotebookServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(NotebookServiceClient.common_location_path) + parse_common_location_path = staticmethod( + NotebookServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NotebookServiceAsyncClient: The constructed client. 
+ """ + return NotebookServiceClient.from_service_account_info.__func__(NotebookServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NotebookServiceAsyncClient: The constructed client. + """ + return NotebookServiceClient.from_service_account_file.__func__(NotebookServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> NotebookServiceTransport: + """Returns the transport used by the client instance. + + Returns: + NotebookServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(NotebookServiceClient).get_transport_class, type(NotebookServiceClient) + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, NotebookServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the notebook service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.NotebookServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = NotebookServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_instances( + self, + request: service.ListInstancesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists instances in a given project and location. + + Args: + request (:class:`google.cloud.notebooks_v1.types.ListInstancesRequest`): + The request object. Request for listing notebook + instances. + parent (:class:`str`): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.services.notebook_service.pagers.ListInstancesAsyncPager: + Response for listing notebook + instances. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_instances, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_instance( + self, + request: service.GetInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> instance.Instance: + r"""Gets details of a single Instance. + + Args: + request (:class:`google.cloud.notebooks_v1.types.GetInstanceRequest`): + The request object. Request for getting a notebook + instance. + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.types.Instance: + The definition of a notebook + instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_instance( + self, + request: service.CreateInstanceRequest = None, + *, + parent: str = None, + instance: gcn_instance.Instance = None, + instance_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Instance in a given project and + location. + + Args: + request (:class:`google.cloud.notebooks_v1.types.CreateInstanceRequest`): + The request object. Request for creating a notebook + instance. + parent (:class:`str`): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.notebooks_v1.types.Instance`): + Required. The instance to be created. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. User-defined unique ID of + this instance. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_instance, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def register_instance( + self, + request: service.RegisterInstanceRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Registers an existing legacy notebook instance to the + Notebooks API server. Legacy instances are instances + created with the legacy Compute Engine calls. They are + not manageable by the Notebooks API out of the box. This + call makes these instances manageable by the Notebooks + API. + + Args: + request (:class:`google.cloud.notebooks_v1.types.RegisterInstanceRequest`): + The request object. Request for registering a notebook + instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.RegisterInstanceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.register_instance, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def set_instance_accelerator( + self, + request: service.SetInstanceAcceleratorRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the guest accelerators of a single Instance. + + Args: + request (:class:`google.cloud.notebooks_v1.types.SetInstanceAcceleratorRequest`): + The request object. Request for setting instance + accelerator. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.SetInstanceAcceleratorRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_instance_accelerator, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def set_instance_machine_type( + self, + request: service.SetInstanceMachineTypeRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the machine type of a single Instance. + + Args: + request (:class:`google.cloud.notebooks_v1.types.SetInstanceMachineTypeRequest`): + The request object. Request for setting instance machine + type. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.SetInstanceMachineTypeRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_instance_machine_type, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_instance_config( + self, + request: service.UpdateInstanceConfigRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Update Notebook Instance configurations. + + Args: + request (:class:`google.cloud.notebooks_v1.types.UpdateInstanceConfigRequest`): + The request object. Request for updating instance + configurations. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.UpdateInstanceConfigRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_instance_config, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_shielded_instance_config( + self, + request: service.UpdateShieldedInstanceConfigRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the Shielded instance configuration of a + single Instance. + + Args: + request (:class:`google.cloud.notebooks_v1.types.UpdateShieldedInstanceConfigRequest`): + The request object. Request for updating the Shielded + Instance config for a notebook instance. You can only + use this method on a stopped instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.UpdateShieldedInstanceConfigRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_shielded_instance_config, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def set_instance_labels( + self, + request: service.SetInstanceLabelsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Replaces all the labels of an Instance. + + Args: + request (:class:`google.cloud.notebooks_v1.types.SetInstanceLabelsRequest`): + The request object. Request for setting instance labels. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.SetInstanceLabelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_instance_labels, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance( + self, + request: service.DeleteInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Instance. + + Args: + request (:class:`google.cloud.notebooks_v1.types.DeleteInstanceRequest`): + The request object. Request for deleting a notebook + instance. + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.DeleteInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_instance, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def start_instance( + self, + request: service.StartInstanceRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts a notebook instance. + + Args: + request (:class:`google.cloud.notebooks_v1.types.StartInstanceRequest`): + The request object. Request for starting a notebook + instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.StartInstanceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.start_instance, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def stop_instance( + self, + request: service.StopInstanceRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Stops a notebook instance. + + Args: + request (:class:`google.cloud.notebooks_v1.types.StopInstanceRequest`): + The request object. Request for stopping a notebook + instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.StopInstanceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.stop_instance, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def reset_instance( + self, + request: service.ResetInstanceRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Resets a notebook instance. + + Args: + request (:class:`google.cloud.notebooks_v1.types.ResetInstanceRequest`): + The request object. Request for reseting a notebook + instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.ResetInstanceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reset_instance, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def report_instance_info( + self, + request: service.ReportInstanceInfoRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Allows notebook instances to + report their latest instance information to the + Notebooks API server. The server will merge the reported + information to the instance metadata store. Do not use + this method directly. + + Args: + request (:class:`google.cloud.notebooks_v1.types.ReportInstanceInfoRequest`): + The request object. Request for notebook instances to + report information to Notebooks API. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.ReportInstanceInfoRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.report_instance_info, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def is_instance_upgradeable( + self, + request: service.IsInstanceUpgradeableRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.IsInstanceUpgradeableResponse: + r"""Check if a notebook instance is upgradable. + + Args: + request (:class:`google.cloud.notebooks_v1.types.IsInstanceUpgradeableRequest`): + The request object. Request for checking if a notebook + instance is upgradeable. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.notebooks_v1.types.IsInstanceUpgradeableResponse: + Response for checking if a notebook + instance is upgradeable. + + """ + # Create or coerce a protobuf request object. + request = service.IsInstanceUpgradeableRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.is_instance_upgradeable, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("notebook_instance", request.notebook_instance),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_instance_health( + self, + request: service.GetInstanceHealthRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.GetInstanceHealthResponse: + r"""Check if a notebook instance is healthy. + + Args: + request (:class:`google.cloud.notebooks_v1.types.GetInstanceHealthRequest`): + The request object. Request for checking if a notebook + instance is healthy. + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.notebooks_v1.types.GetInstanceHealthResponse: + Response for checking if a notebook + instance is healthy. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetInstanceHealthRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance_health, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def upgrade_instance( + self, + request: service.UpgradeInstanceRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Upgrades a notebook instance to the latest version. + + Args: + request (:class:`google.cloud.notebooks_v1.types.UpgradeInstanceRequest`): + The request object. Request for upgrading a notebook + instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.UpgradeInstanceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.upgrade_instance, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def rollback_instance( + self, + request: service.RollbackInstanceRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Rollbacks a notebook instance to the previous + version. + + Args: + request (:class:`google.cloud.notebooks_v1.types.RollbackInstanceRequest`): + The request object. Request for rollbacking a notebook + instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.RollbackInstanceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rollback_instance, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def upgrade_instance_internal( + self, + request: service.UpgradeInstanceInternalRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Allows notebook instances to + call this endpoint to upgrade themselves. Do not use + this method directly. + + Args: + request (:class:`google.cloud.notebooks_v1.types.UpgradeInstanceInternalRequest`): + The request object. Request for upgrading a notebook + instance from within the VM + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + request = service.UpgradeInstanceInternalRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.upgrade_instance_internal, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_environments( + self, + request: service.ListEnvironmentsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEnvironmentsAsyncPager: + r"""Lists environments in a project. + + Args: + request (:class:`google.cloud.notebooks_v1.types.ListEnvironmentsRequest`): + The request object. Request for listing environments. + parent (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.services.notebook_service.pagers.ListEnvironmentsAsyncPager: + Response for listing environments. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListEnvironmentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_environments, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEnvironmentsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_environment( + self, + request: service.GetEnvironmentRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environment.Environment: + r"""Gets details of a single Environment. + + Args: + request (:class:`google.cloud.notebooks_v1.types.GetEnvironmentRequest`): + The request object. Request for getting a notebook + environment. + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/environments/{environment_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.types.Environment: + Definition of a software environment + that is used to start a notebook + instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_environment, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_environment( + self, + request: service.CreateEnvironmentRequest = None, + *, + parent: str = None, + environment: gcn_environment.Environment = None, + environment_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Environment. + + Args: + request (:class:`google.cloud.notebooks_v1.types.CreateEnvironmentRequest`): + The request object. Request for creating a notebook + environment. + parent (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment (:class:`google.cloud.notebooks_v1.types.Environment`): + Required. The environment to be + created. + + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment_id (:class:`str`): + Required. User-defined unique ID of this environment. + The ``environment_id`` must be 1 to 63 characters long + and contain only lowercase letters, numeric characters, + and dashes. The first character must be a lowercase + letter and the last character cannot be a dash. + + This corresponds to the ``environment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.notebooks_v1.types.Environment` Definition of a software environment that is used to start a notebook + instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, environment, environment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if environment is not None: + request.environment = environment + if environment_id is not None: + request.environment_id = environment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_environment, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_environment.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_environment( + self, + request: service.DeleteEnvironmentRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Environment. + + Args: + request (:class:`google.cloud.notebooks_v1.types.DeleteEnvironmentRequest`): + The request object. Request for deleting a notebook + environment. + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/environments/{environment_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.DeleteEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_environment, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_schedules( + self, + request: service.ListSchedulesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSchedulesAsyncPager: + r"""Lists schedules in a given project and location. + + Args: + request (:class:`google.cloud.notebooks_v1.types.ListSchedulesRequest`): + The request object. Request for listing scheduled + notebook job. + parent (:class:`str`): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.services.notebook_service.pagers.ListSchedulesAsyncPager: + Response for listing scheduled + notebook job. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListSchedulesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_schedules, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSchedulesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_schedule( + self, + request: service.GetScheduleRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schedule.Schedule: + r"""Gets details of schedule + + Args: + request (:class:`google.cloud.notebooks_v1.types.GetScheduleRequest`): + The request object. Request for getting scheduled + notebook. + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/schedules/{schedule_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.types.Schedule: + The definition of a schedule. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_schedule, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_schedule( + self, + request: service.DeleteScheduleRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes schedule and all underlying jobs + + Args: + request (:class:`google.cloud.notebooks_v1.types.DeleteScheduleRequest`): + The request object. Request for deleting an Schedule + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/schedules/{schedule_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.DeleteScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_schedule, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_schedule( + self, + request: service.CreateScheduleRequest = None, + *, + parent: str = None, + schedule: gcn_schedule.Schedule = None, + schedule_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Scheduled Notebook in a given project + and location. + + Args: + request (:class:`google.cloud.notebooks_v1.types.CreateScheduleRequest`): + The request object. Request for created scheduled + notebooks + parent (:class:`str`): + Required. 
Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule (:class:`google.cloud.notebooks_v1.types.Schedule`): + Required. The schedule to be created. + This corresponds to the ``schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_id (:class:`str`): + Required. User-defined unique ID of + this schedule. + + This corresponds to the ``schedule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Schedule` The + definition of a schedule. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, schedule, schedule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if schedule is not None: + request.schedule = schedule + if schedule_id is not None: + request.schedule_id = schedule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_schedule, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_schedule.Schedule, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def trigger_schedule( + self, + request: service.TriggerScheduleRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Triggers execution of an existing schedule. + + Args: + request (:class:`google.cloud.notebooks_v1.types.TriggerScheduleRequest`): + The request object. Request for created scheduled + notebooks + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Schedule` The + definition of a schedule. + + """ + # Create or coerce a protobuf request object. + request = service.TriggerScheduleRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.trigger_schedule, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + schedule.Schedule, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_executions( + self, + request: service.ListExecutionsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExecutionsAsyncPager: + r"""Lists executions in a given project and location + + Args: + request (:class:`google.cloud.notebooks_v1.types.ListExecutionsRequest`): + The request object. Request for listing scheduled + notebook executions. + parent (:class:`str`): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.services.notebook_service.pagers.ListExecutionsAsyncPager: + Response for listing scheduled + notebook executions + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListExecutionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_executions, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListExecutionsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_execution( + self, + request: service.GetExecutionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> execution.Execution: + r"""Gets details of executions + + Args: + request (:class:`google.cloud.notebooks_v1.types.GetExecutionRequest`): + The request object. Request for getting scheduled + notebook execution + name (:class:`str`): + Required. 
Format: + ``projects/{project_id}/locations/{location}/executions/{execution_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.types.Execution: + The definition of a single executed + notebook. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetExecutionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_execution, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def delete_execution( + self, + request: service.DeleteExecutionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes execution + + Args: + request (:class:`google.cloud.notebooks_v1.types.DeleteExecutionRequest`): + The request object. Request for deleting a scheduled + notebook execution + name (:class:`str`): + Required. Format: + ``projects/{project_id}/locations/{location}/executions/{execution_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = service.DeleteExecutionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_execution, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_execution( + self, + request: service.CreateExecutionRequest = None, + *, + parent: str = None, + execution: gcn_execution.Execution = None, + execution_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Scheduled Notebook in a given project + and location. + + Args: + request (:class:`google.cloud.notebooks_v1.types.CreateExecutionRequest`): + The request object. Request to create notebook execution + parent (:class:`str`): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + execution (:class:`google.cloud.notebooks_v1.types.Execution`): + Required. The execution to be + created. 
+ + This corresponds to the ``execution`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + execution_id (:class:`str`): + Required. User-defined unique ID of + this execution. + + This corresponds to the ``execution_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Execution` The + definition of a single executed notebook. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, execution, execution_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateExecutionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if execution is not None: + request.execution = execution + if execution_id is not None: + request.execution_id = execution_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_execution, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_execution.Execution, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-notebooks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("NotebookServiceAsyncClient",) diff --git a/google/cloud/notebooks_v1/services/notebook_service/client.py b/google/cloud/notebooks_v1/services/notebook_service/client.py new file mode 100644 index 0000000..f096385 --- /dev/null +++ b/google/cloud/notebooks_v1/services/notebook_service/client.py @@ -0,0 +1,2790 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.notebooks_v1.services.notebook_service import pagers +from google.cloud.notebooks_v1.types import environment +from google.cloud.notebooks_v1.types import environment as gcn_environment +from google.cloud.notebooks_v1.types import execution +from google.cloud.notebooks_v1.types import execution as gcn_execution +from google.cloud.notebooks_v1.types import instance +from google.cloud.notebooks_v1.types import instance as gcn_instance +from google.cloud.notebooks_v1.types import schedule +from google.cloud.notebooks_v1.types import schedule as gcn_schedule +from google.cloud.notebooks_v1.types import service +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import NotebookServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import NotebookServiceGrpcTransport +from .transports.grpc_asyncio import NotebookServiceGrpcAsyncIOTransport + + +class NotebookServiceClientMeta(type): + """Metaclass for the NotebookService client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[NotebookServiceTransport]] + _transport_registry["grpc"] = NotebookServiceGrpcTransport + _transport_registry["grpc_asyncio"] = NotebookServiceGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[NotebookServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class NotebookServiceClient(metaclass=NotebookServiceClientMeta): + """API v1 service for Cloud AI Platform Notebooks.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "notebooks.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NotebookServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NotebookServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> NotebookServiceTransport: + """Returns the transport used by the client instance. + + Returns: + NotebookServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def environment_path(project: str, environment: str,) -> str: + """Returns a fully-qualified environment string.""" + return "projects/{project}/environments/{environment}".format( + project=project, environment=environment, + ) + + @staticmethod + def parse_environment_path(path: str) -> Dict[str, str]: + """Parses a environment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/environments/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def execution_path(project: str, location: str, execution: str,) -> str: + """Returns a fully-qualified execution string.""" + return "projects/{project}/location/{location}/executions/{execution}".format( + project=project, location=location, execution=execution, + ) + + @staticmethod + def parse_execution_path(path: str) -> Dict[str, str]: + """Parses a execution path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/location/(?P.+?)/executions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def instance_path(project: str, instance: str,) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/instances/{instance}".format( + project=project, instance=instance, + ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str, str]: + """Parses a instance path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def schedule_path(project: str, location: str, schedule: str,) -> str: + """Returns a fully-qualified schedule string.""" + return "projects/{project}/location/{location}/schedules/{schedule}".format( + project=project, location=location, schedule=schedule, + ) + + @staticmethod + def parse_schedule_path(path: str) -> Dict[str, str]: + """Parses a schedule path into its component segments.""" + m = re.match( + 
r"^projects/(?P.+?)/location/(?P.+?)/schedules/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + 
project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, NotebookServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the notebook service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, NotebookServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. 
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, NotebookServiceTransport): + # transport is a NotebookServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def list_instances( + self, + request: service.ListInstancesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists instances in a given project and location. + + Args: + request (google.cloud.notebooks_v1.types.ListInstancesRequest): + The request object. Request for listing notebook + instances. + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.services.notebook_service.pagers.ListInstancesPager: + Response for listing notebook + instances. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListInstancesRequest): + request = service.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_instance( + self, + request: service.GetInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> instance.Instance: + r"""Gets details of a single Instance. + + Args: + request (google.cloud.notebooks_v1.types.GetInstanceRequest): + The request object. Request for getting a notebook + instance. + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.types.Instance: + The definition of a notebook + instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetInstanceRequest): + request = service.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_instance( + self, + request: service.CreateInstanceRequest = None, + *, + parent: str = None, + instance: gcn_instance.Instance = None, + instance_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Instance in a given project and + location. + + Args: + request (google.cloud.notebooks_v1.types.CreateInstanceRequest): + The request object. Request for creating a notebook + instance. + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.notebooks_v1.types.Instance): + Required. The instance to be created. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. User-defined unique ID of + this instance. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateInstanceRequest): + request = service.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def register_instance( + self, + request: service.RegisterInstanceRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Registers an existing legacy notebook instance to the + Notebooks API server. Legacy instances are instances + created with the legacy Compute Engine calls. They are + not manageable by the Notebooks API out of the box. This + call makes these instances manageable by the Notebooks + API. + + Args: + request (google.cloud.notebooks_v1.types.RegisterInstanceRequest): + The request object. Request for registering a notebook + instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.RegisterInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.RegisterInstanceRequest): + request = service.RegisterInstanceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.register_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def set_instance_accelerator( + self, + request: service.SetInstanceAcceleratorRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the guest accelerators of a single Instance. + + Args: + request (google.cloud.notebooks_v1.types.SetInstanceAcceleratorRequest): + The request object. Request for setting instance + accelerator. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.SetInstanceAcceleratorRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.SetInstanceAcceleratorRequest): + request = service.SetInstanceAcceleratorRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_instance_accelerator] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def set_instance_machine_type( + self, + request: service.SetInstanceMachineTypeRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the machine type of a single Instance. + + Args: + request (google.cloud.notebooks_v1.types.SetInstanceMachineTypeRequest): + The request object. Request for setting instance machine + type. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.SetInstanceMachineTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.SetInstanceMachineTypeRequest): + request = service.SetInstanceMachineTypeRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.set_instance_machine_type + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_instance_config( + self, + request: service.UpdateInstanceConfigRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update Notebook Instance configurations. + + Args: + request (google.cloud.notebooks_v1.types.UpdateInstanceConfigRequest): + The request object. Request for updating instance + configurations. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateInstanceConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, service.UpdateInstanceConfigRequest): + request = service.UpdateInstanceConfigRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_shielded_instance_config( + self, + request: service.UpdateShieldedInstanceConfigRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the Shielded instance configuration of a + single Instance. + + Args: + request (google.cloud.notebooks_v1.types.UpdateShieldedInstanceConfigRequest): + The request object. Request for updating the Shielded + Instance config for a notebook instance. You can only + use this method on a stopped instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. 
+ # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateShieldedInstanceConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateShieldedInstanceConfigRequest): + request = service.UpdateShieldedInstanceConfigRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_shielded_instance_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def set_instance_labels( + self, + request: service.SetInstanceLabelsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Replaces all the labels of an Instance. + + Args: + request (google.cloud.notebooks_v1.types.SetInstanceLabelsRequest): + The request object. Request for setting instance labels. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.SetInstanceLabelsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.SetInstanceLabelsRequest): + request = service.SetInstanceLabelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_instance_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_instance( + self, + request: service.DeleteInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Instance. + + Args: + request (google.cloud.notebooks_v1.types.DeleteInstanceRequest): + The request object. Request for deleting a notebook + instance. + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteInstanceRequest): + request = service.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def start_instance( + self, + request: service.StartInstanceRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Starts a notebook instance. + + Args: + request (google.cloud.notebooks_v1.types.StartInstanceRequest): + The request object. Request for starting a notebook + instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.StartInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.StartInstanceRequest): + request = service.StartInstanceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_instance] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def stop_instance( + self, + request: service.StopInstanceRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Stops a notebook instance. + + Args: + request (google.cloud.notebooks_v1.types.StopInstanceRequest): + The request object. Request for stopping a notebook + instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.StopInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.StopInstanceRequest): + request = service.StopInstanceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_instance] + + # Certain fields should be provided within the metadata header; + # add these here. 
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            instance.Instance,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def reset_instance(
+        self,
+        request: service.ResetInstanceRequest = None,
+        *,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Resets a notebook instance.
+
+        Args:
+            request (google.cloud.notebooks_v1.types.ResetInstanceRequest):
+                The request object. Request for resetting a notebook
+                instance
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.notebooks_v1.types.Instance` The
+                definition of a notebook instance.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a service.ResetInstanceRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, service.ResetInstanceRequest):
+            request = service.ResetInstanceRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.reset_instance]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def report_instance_info( + self, + request: service.ReportInstanceInfoRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Allows notebook instances to + report their latest instance information to the + Notebooks API server. The server will merge the reported + information to the instance metadata store. Do not use + this method directly. + + Args: + request (google.cloud.notebooks_v1.types.ReportInstanceInfoRequest): + The request object. Request for notebook instances to + report information to Notebooks API. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.ReportInstanceInfoRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+        if not isinstance(request, service.ReportInstanceInfoRequest):
+            request = service.ReportInstanceInfoRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.report_instance_info]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            instance.Instance,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def is_instance_upgradeable(
+        self,
+        request: service.IsInstanceUpgradeableRequest = None,
+        *,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> service.IsInstanceUpgradeableResponse:
+        r"""Check if a notebook instance is upgradeable.
+
+        Args:
+            request (google.cloud.notebooks_v1.types.IsInstanceUpgradeableRequest):
+                The request object. Request for checking if a notebook
+                instance is upgradeable.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.notebooks_v1.types.IsInstanceUpgradeableResponse:
+                Response for checking if a notebook
+                instance is upgradeable.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a service.IsInstanceUpgradeableRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+ if not isinstance(request, service.IsInstanceUpgradeableRequest): + request = service.IsInstanceUpgradeableRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.is_instance_upgradeable] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("notebook_instance", request.notebook_instance),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_instance_health( + self, + request: service.GetInstanceHealthRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.GetInstanceHealthResponse: + r"""Check if a notebook instance is healthy. + + Args: + request (google.cloud.notebooks_v1.types.GetInstanceHealthRequest): + The request object. Request for checking if a notebook + instance is healthy. + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.types.GetInstanceHealthResponse: + Response for checking if a notebook + instance is healthy. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetInstanceHealthRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetInstanceHealthRequest): + request = service.GetInstanceHealthRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance_health] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def upgrade_instance( + self, + request: service.UpgradeInstanceRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Upgrades a notebook instance to the latest version. + + Args: + request (google.cloud.notebooks_v1.types.UpgradeInstanceRequest): + The request object. Request for upgrading a notebook + instance + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+
+                The result type for the operation will be
+                :class:`google.cloud.notebooks_v1.types.Instance` The
+                definition of a notebook instance.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a service.UpgradeInstanceRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, service.UpgradeInstanceRequest):
+            request = service.UpgradeInstanceRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.upgrade_instance]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            instance.Instance,
+            metadata_type=service.OperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def rollback_instance(
+        self,
+        request: service.RollbackInstanceRequest = None,
+        *,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Rolls back a notebook instance to the previous
+        version.
+
+        Args:
+            request (google.cloud.notebooks_v1.types.RollbackInstanceRequest):
+                The request object. Request for rolling back a notebook
+                instance
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.RollbackInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.RollbackInstanceRequest): + request = service.RollbackInstanceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rollback_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def upgrade_instance_internal( + self, + request: service.UpgradeInstanceInternalRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Allows notebook instances to + call this endpoint to upgrade themselves. Do not use + this method directly. + + Args: + request (google.cloud.notebooks_v1.types.UpgradeInstanceInternalRequest): + The request object. Request for upgrading a notebook + instance from within the VM + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Instance` The + definition of a notebook instance. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.UpgradeInstanceInternalRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpgradeInstanceInternalRequest): + request = service.UpgradeInstanceInternalRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.upgrade_instance_internal + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + instance.Instance, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_environments( + self, + request: service.ListEnvironmentsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEnvironmentsPager: + r"""Lists environments in a project. + + Args: + request (google.cloud.notebooks_v1.types.ListEnvironmentsRequest): + The request object. Request for listing environments. 
+ parent (str): + Required. Format: + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.services.notebook_service.pagers.ListEnvironmentsPager: + Response for listing environments. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListEnvironmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListEnvironmentsRequest): + request = service.ListEnvironmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_environments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEnvironmentsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_environment( + self, + request: service.GetEnvironmentRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environment.Environment: + r"""Gets details of a single Environment. + + Args: + request (google.cloud.notebooks_v1.types.GetEnvironmentRequest): + The request object. Request for getting a notebook + environment. + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/environments/{environment_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.types.Environment: + Definition of a software environment + that is used to start a notebook + instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetEnvironmentRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetEnvironmentRequest): + request = service.GetEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_environment( + self, + request: service.CreateEnvironmentRequest = None, + *, + parent: str = None, + environment: gcn_environment.Environment = None, + environment_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Environment. + + Args: + request (google.cloud.notebooks_v1.types.CreateEnvironmentRequest): + The request object. Request for creating a notebook + environment. + parent (str): + Required. Format: + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment (google.cloud.notebooks_v1.types.Environment): + Required. The environment to be + created. + + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment_id (str): + Required. User-defined unique ID of this environment. 
+ The ``environment_id`` must be 1 to 63 characters long + and contain only lowercase letters, numeric characters, + and dashes. The first character must be a lowercase + letter and the last character cannot be a dash. + + This corresponds to the ``environment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.notebooks_v1.types.Environment` Definition of a software environment that is used to start a notebook + instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, environment, environment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateEnvironmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateEnvironmentRequest): + request = service.CreateEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if environment is not None: + request.environment = environment + if environment_id is not None: + request.environment_id = environment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_environment.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_environment( + self, + request: service.DeleteEnvironmentRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Environment. + + Args: + request (google.cloud.notebooks_v1.types.DeleteEnvironmentRequest): + The request object. Request for deleting a notebook + environment. + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/environments/{environment_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteEnvironmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteEnvironmentRequest): + request = service.DeleteEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_schedules( + self, + request: service.ListSchedulesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSchedulesPager: + r"""Lists schedules in a given project and location. + + Args: + request (google.cloud.notebooks_v1.types.ListSchedulesRequest): + The request object. Request for listing scheduled + notebook job. + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.services.notebook_service.pagers.ListSchedulesPager: + Response for listing scheduled + notebook job. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListSchedulesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, service.ListSchedulesRequest): + request = service.ListSchedulesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_schedules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSchedulesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_schedule( + self, + request: service.GetScheduleRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schedule.Schedule: + r"""Gets details of schedule + + Args: + request (google.cloud.notebooks_v1.types.GetScheduleRequest): + The request object. Request for getting scheduled + notebook. + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/schedules/{schedule_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.types.Schedule: + The definition of a schedule. 
+ """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetScheduleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetScheduleRequest): + request = service.GetScheduleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_schedule( + self, + request: service.DeleteScheduleRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes schedule and all underlying jobs + + Args: + request (google.cloud.notebooks_v1.types.DeleteScheduleRequest): + The request object. Request for deleting an Schedule + name (str): + Required. 
Format: + ``projects/{project_id}/locations/{location}/schedules/{schedule_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteScheduleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteScheduleRequest): + request = service.DeleteScheduleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_schedule( + self, + request: service.CreateScheduleRequest = None, + *, + parent: str = None, + schedule: gcn_schedule.Schedule = None, + schedule_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Scheduled Notebook in a given project + and location. + + Args: + request (google.cloud.notebooks_v1.types.CreateScheduleRequest): + The request object. Request for created scheduled + notebooks + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule (google.cloud.notebooks_v1.types.Schedule): + Required. The schedule to be created. + This corresponds to the ``schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_id (str): + Required. User-defined unique ID of + this schedule. + + This corresponds to the ``schedule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Schedule` The + definition of a schedule. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, schedule, schedule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateScheduleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateScheduleRequest): + request = service.CreateScheduleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if schedule is not None: + request.schedule = schedule + if schedule_id is not None: + request.schedule_id = schedule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_schedule.Schedule, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def trigger_schedule( + self, + request: service.TriggerScheduleRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Triggers execution of an existing schedule. + + Args: + request (google.cloud.notebooks_v1.types.TriggerScheduleRequest): + The request object. Request for created scheduled + notebooks + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Schedule` The + definition of a schedule. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.TriggerScheduleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.TriggerScheduleRequest): + request = service.TriggerScheduleRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.trigger_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + schedule.Schedule, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_executions( + self, + request: service.ListExecutionsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExecutionsPager: + r"""Lists executions in a given project and location + + Args: + request (google.cloud.notebooks_v1.types.ListExecutionsRequest): + The request object. Request for listing scheduled + notebook executions. + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.services.notebook_service.pagers.ListExecutionsPager: + Response for listing scheduled + notebook executions + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListExecutionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListExecutionsRequest): + request = service.ListExecutionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_executions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExecutionsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_execution( + self, + request: service.GetExecutionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> execution.Execution: + r"""Gets details of executions + + Args: + request (google.cloud.notebooks_v1.types.GetExecutionRequest): + The request object. Request for getting scheduled + notebook execution + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/executions/{execution_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.notebooks_v1.types.Execution: + The definition of a single executed + notebook. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetExecutionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetExecutionRequest): + request = service.GetExecutionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_execution] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
    def delete_execution(
        self,
        request: service.DeleteExecutionRequest = None,
        *,
        name: str = None,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation.Operation:
        r"""Deletes execution

        Args:
            request (google.cloud.notebooks_v1.types.DeleteExecutionRequest):
                The request object. Request for deleting a scheduled
                notebook execution
            name (str):
                Required. Format:
                ``projects/{project_id}/locations/{location}/executions/{execution_id}``

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation.Operation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
                empty messages in your APIs. A typical example is to
                use it as the request or the response type of an API
                method. For instance:

                service Foo {
                    rpc Bar(google.protobuf.Empty) returns
                    (google.protobuf.Empty);

                }

                The JSON representation for Empty is empty JSON
                object {}.

        Raises:
            ValueError: If ``request`` is supplied together with the
                flattened ``name`` argument.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a service.DeleteExecutionRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, service.DeleteExecutionRequest):
            request = service.DeleteExecutionRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete_execution]

        # Certain fields should be provided within the metadata header;
        # add these here.  The routing header lets the backend route the
        # request by resource name.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Send the request.
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Wrap the response in an operation future.  Deletion resolves to
        # Empty; OperationMetadata carries progress information.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            empty_pb2.Empty,
            metadata_type=service.OperationMetadata,
        )

        # Done; return the response.
        return response
+ + This corresponds to the ``execution`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + execution_id (str): + Required. User-defined unique ID of + this execution. + + This corresponds to the ``execution_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.notebooks_v1.types.Execution` The + definition of a single executed notebook. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, execution, execution_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateExecutionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.CreateExecutionRequest): + request = service.CreateExecutionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if execution is not None: + request.execution = execution + if execution_id is not None: + request.execution_id = execution_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
try:
    # Report the installed package version in the x-goog-api-client /
    # user-agent header sent with every request.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-cloud-notebooks",).version,
    )
except pkg_resources.DistributionNotFound:
    # Package metadata unavailable (e.g. running from a source checkout);
    # fall back to a version-less ClientInfo.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


__all__ = ("NotebookServiceClient",)
class ListInstancesPager:
    """Synchronous pager over ``list_instances`` results.

    Wraps an initial
    :class:`google.cloud.notebooks_v1.types.ListInstancesResponse` and, when
    iterated, transparently issues further ``ListInstances`` requests as
    needed, yielding every item from the ``instances`` field across all
    pages.

    Attribute access falls through to the most recently fetched response, so
    all the usual :class:`google.cloud.notebooks_v1.types.ListInstancesResponse`
    attributes are available on the pager; only the latest response is
    retained for that purpose.
    """

    def __init__(
        self,
        method: Callable[..., service.ListInstancesResponse],
        request: service.ListInstancesRequest,
        response: service.ListInstancesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The bound API method that produced
                ``response`` and is re-invoked to fetch subsequent pages.
            request (google.cloud.notebooks_v1.types.ListInstancesRequest):
                The initial request object.
            response (google.cloud.notebooks_v1.types.ListInstancesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = service.ListInstancesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[service.ListInstancesResponse]:
        """Yield each response page, lazily fetching the next on demand."""
        yield self._response
        while True:
            token = self._response.next_page_token
            if not token:
                return
            self._request.page_token = token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterable[instance.Instance]:
        # Flatten all pages into a single stream of instances.
        return (item for page in self.pages for item in page.instances)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListEnvironmentsPager:
    """Synchronous pager over ``list_environments`` results.

    Wraps an initial
    :class:`google.cloud.notebooks_v1.types.ListEnvironmentsResponse` and,
    when iterated, transparently issues further ``ListEnvironments``
    requests as needed, yielding every item from the ``environments`` field
    across all pages.

    Attribute access falls through to the most recently fetched response, so
    all the usual :class:`google.cloud.notebooks_v1.types.ListEnvironmentsResponse`
    attributes are available on the pager; only the latest response is
    retained for that purpose.
    """

    def __init__(
        self,
        method: Callable[..., service.ListEnvironmentsResponse],
        request: service.ListEnvironmentsRequest,
        response: service.ListEnvironmentsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The bound API method that produced
                ``response`` and is re-invoked to fetch subsequent pages.
            request (google.cloud.notebooks_v1.types.ListEnvironmentsRequest):
                The initial request object.
            response (google.cloud.notebooks_v1.types.ListEnvironmentsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = service.ListEnvironmentsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[service.ListEnvironmentsResponse]:
        """Yield each response page, lazily fetching the next on demand."""
        yield self._response
        while True:
            token = self._response.next_page_token
            if not token:
                return
            self._request.page_token = token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterable[environment.Environment]:
        # Flatten all pages into a single stream of environments.
        return (item for page in self.pages for item in page.environments)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListSchedulesPager:
    """Synchronous pager over ``list_schedules`` results.

    Wraps an initial
    :class:`google.cloud.notebooks_v1.types.ListSchedulesResponse` and, when
    iterated, transparently issues further ``ListSchedules`` requests as
    needed, yielding every item from the ``schedules`` field across all
    pages.

    Attribute access falls through to the most recently fetched response, so
    all the usual :class:`google.cloud.notebooks_v1.types.ListSchedulesResponse`
    attributes are available on the pager; only the latest response is
    retained for that purpose.
    """

    def __init__(
        self,
        method: Callable[..., service.ListSchedulesResponse],
        request: service.ListSchedulesRequest,
        response: service.ListSchedulesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The bound API method that produced
                ``response`` and is re-invoked to fetch subsequent pages.
            request (google.cloud.notebooks_v1.types.ListSchedulesRequest):
                The initial request object.
            response (google.cloud.notebooks_v1.types.ListSchedulesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = service.ListSchedulesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[service.ListSchedulesResponse]:
        """Yield each response page, lazily fetching the next on demand."""
        yield self._response
        while True:
            token = self._response.next_page_token
            if not token:
                return
            self._request.page_token = token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterable[schedule.Schedule]:
        # Flatten all pages into a single stream of schedules.
        return (item for page in self.pages for item in page.schedules)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListExecutionsPager:
    """Synchronous pager over ``list_executions`` results.

    Wraps an initial
    :class:`google.cloud.notebooks_v1.types.ListExecutionsResponse` and,
    when iterated, transparently issues further ``ListExecutions`` requests
    as needed, yielding every item from the ``executions`` field across all
    pages.

    Attribute access falls through to the most recently fetched response, so
    all the usual :class:`google.cloud.notebooks_v1.types.ListExecutionsResponse`
    attributes are available on the pager; only the latest response is
    retained for that purpose.
    """

    def __init__(
        self,
        method: Callable[..., service.ListExecutionsResponse],
        request: service.ListExecutionsRequest,
        response: service.ListExecutionsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The bound API method that produced
                ``response`` and is re-invoked to fetch subsequent pages.
            request (google.cloud.notebooks_v1.types.ListExecutionsRequest):
                The initial request object.
            response (google.cloud.notebooks_v1.types.ListExecutionsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = service.ListExecutionsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[service.ListExecutionsResponse]:
        """Yield each response page, lazily fetching the next on demand."""
        yield self._response
        while True:
            token = self._response.next_page_token
            if not token:
                return
            self._request.page_token = token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterable[execution.Execution]:
        # Flatten all pages into a single stream of executions.
        return (item for page in self.pages for item in page.executions)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
+ + All the usual :class:`google.cloud.notebooks_v1.types.ListExecutionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListExecutionsResponse]], + request: service.ListExecutionsRequest, + response: service.ListExecutionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.notebooks_v1.types.ListExecutionsRequest): + The initial request object. + response (google.cloud.notebooks_v1.types.ListExecutionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListExecutionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[service.ListExecutionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[execution.Execution]: + async def async_generator(): + async for page in self.pages: + for response in page.executions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/notebooks_v1/services/notebook_service/transports/__init__.py b/google/cloud/notebooks_v1/services/notebook_service/transports/__init__.py new file mode 100644 index 0000000..9c6f2a1 --- /dev/null +++ 
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Transport registry for the NotebookService client.

Maps transport names (``"grpc"``, ``"grpc_asyncio"``) to their transport
classes so the client factory can select an implementation by name.
"""
from collections import OrderedDict
from typing import Dict, Type

from .base import NotebookServiceTransport
from .grpc import NotebookServiceGrpcTransport
from .grpc_asyncio import NotebookServiceGrpcAsyncIOTransport


# Compile a registry of transports.
# OrderedDict preserves insertion order, keeping "grpc" as the first
# (default) entry when the registry is enumerated.
_transport_registry = OrderedDict()  # type: Dict[str, Type[NotebookServiceTransport]]
_transport_registry["grpc"] = NotebookServiceGrpcTransport
_transport_registry["grpc_asyncio"] = NotebookServiceGrpcAsyncIOTransport

__all__ = (
    "NotebookServiceTransport",
    "NotebookServiceGrpcTransport",
    "NotebookServiceGrpcAsyncIOTransport",
)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.notebooks_v1.types import environment +from google.cloud.notebooks_v1.types import execution +from google.cloud.notebooks_v1.types import instance +from google.cloud.notebooks_v1.types import schedule +from google.cloud.notebooks_v1.types import service +from google.longrunning import operations_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-notebooks",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class NotebookServiceTransport(abc.ABC): + 
"""Abstract transport class for NotebookService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "notebooks.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + + # If the credentials is service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, default_timeout=60.0, client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, default_timeout=60.0, client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, default_timeout=60.0, client_info=client_info, + ), + self.register_instance: gapic_v1.method.wrap_method( + self.register_instance, default_timeout=60.0, client_info=client_info, + ), + self.set_instance_accelerator: gapic_v1.method.wrap_method( + self.set_instance_accelerator, + default_timeout=60.0, + client_info=client_info, + ), + self.set_instance_machine_type: gapic_v1.method.wrap_method( + self.set_instance_machine_type, + default_timeout=60.0, + client_info=client_info, + ), + self.update_instance_config: gapic_v1.method.wrap_method( + self.update_instance_config, + default_timeout=60.0, + client_info=client_info, + ), + self.update_shielded_instance_config: gapic_v1.method.wrap_method( + self.update_shielded_instance_config, + default_timeout=60.0, + client_info=client_info, + ), + self.set_instance_labels: gapic_v1.method.wrap_method( + self.set_instance_labels, default_timeout=60.0, client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, default_timeout=60.0, client_info=client_info, + ), + self.start_instance: gapic_v1.method.wrap_method( + self.start_instance, default_timeout=60.0, client_info=client_info, + ), + self.stop_instance: gapic_v1.method.wrap_method( + self.stop_instance, default_timeout=60.0, client_info=client_info, + ), + self.reset_instance: gapic_v1.method.wrap_method( + self.reset_instance, default_timeout=60.0, client_info=client_info, + ), + self.report_instance_info: gapic_v1.method.wrap_method( + self.report_instance_info, + default_timeout=60.0, + client_info=client_info, + ), + self.is_instance_upgradeable: 
gapic_v1.method.wrap_method( + self.is_instance_upgradeable, + default_timeout=60.0, + client_info=client_info, + ), + self.get_instance_health: gapic_v1.method.wrap_method( + self.get_instance_health, default_timeout=60.0, client_info=client_info, + ), + self.upgrade_instance: gapic_v1.method.wrap_method( + self.upgrade_instance, default_timeout=60.0, client_info=client_info, + ), + self.rollback_instance: gapic_v1.method.wrap_method( + self.rollback_instance, default_timeout=60.0, client_info=client_info, + ), + self.upgrade_instance_internal: gapic_v1.method.wrap_method( + self.upgrade_instance_internal, + default_timeout=60.0, + client_info=client_info, + ), + self.list_environments: gapic_v1.method.wrap_method( + self.list_environments, default_timeout=60.0, client_info=client_info, + ), + self.get_environment: gapic_v1.method.wrap_method( + self.get_environment, default_timeout=60.0, client_info=client_info, + ), + self.create_environment: gapic_v1.method.wrap_method( + self.create_environment, default_timeout=60.0, client_info=client_info, + ), + self.delete_environment: gapic_v1.method.wrap_method( + self.delete_environment, default_timeout=60.0, client_info=client_info, + ), + self.list_schedules: gapic_v1.method.wrap_method( + self.list_schedules, default_timeout=60.0, client_info=client_info, + ), + self.get_schedule: gapic_v1.method.wrap_method( + self.get_schedule, default_timeout=60.0, client_info=client_info, + ), + self.delete_schedule: gapic_v1.method.wrap_method( + self.delete_schedule, default_timeout=60.0, client_info=client_info, + ), + self.create_schedule: gapic_v1.method.wrap_method( + self.create_schedule, default_timeout=60.0, client_info=client_info, + ), + self.trigger_schedule: gapic_v1.method.wrap_method( + self.trigger_schedule, default_timeout=None, client_info=client_info, + ), + self.list_executions: gapic_v1.method.wrap_method( + self.list_executions, default_timeout=60.0, client_info=client_info, + ), + self.get_execution: 
gapic_v1.method.wrap_method( + self.get_execution, default_timeout=60.0, client_info=client_info, + ), + self.delete_execution: gapic_v1.method.wrap_method( + self.delete_execution, default_timeout=60.0, client_info=client_info, + ), + self.create_execution: gapic_v1.method.wrap_method( + self.create_execution, default_timeout=60.0, client_info=client_info, + ), + } + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_instances( + self, + ) -> Callable[ + [service.ListInstancesRequest], + Union[service.ListInstancesResponse, Awaitable[service.ListInstancesResponse]], + ]: + raise NotImplementedError() + + @property + def get_instance( + self, + ) -> Callable[ + [service.GetInstanceRequest], + Union[instance.Instance, Awaitable[instance.Instance]], + ]: + raise NotImplementedError() + + @property + def create_instance( + self, + ) -> Callable[ + [service.CreateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def register_instance( + self, + ) -> Callable[ + [service.RegisterInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def set_instance_accelerator( + self, + ) -> Callable[ + [service.SetInstanceAcceleratorRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def set_instance_machine_type( + self, + ) -> Callable[ + [service.SetInstanceMachineTypeRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_instance_config( + self, + ) -> Callable[ + [service.UpdateInstanceConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise 
NotImplementedError() + + @property + def update_shielded_instance_config( + self, + ) -> Callable[ + [service.UpdateShieldedInstanceConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def set_instance_labels( + self, + ) -> Callable[ + [service.SetInstanceLabelsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_instance( + self, + ) -> Callable[ + [service.DeleteInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def start_instance( + self, + ) -> Callable[ + [service.StartInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def stop_instance( + self, + ) -> Callable[ + [service.StopInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def reset_instance( + self, + ) -> Callable[ + [service.ResetInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def report_instance_info( + self, + ) -> Callable[ + [service.ReportInstanceInfoRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def is_instance_upgradeable( + self, + ) -> Callable[ + [service.IsInstanceUpgradeableRequest], + Union[ + service.IsInstanceUpgradeableResponse, + Awaitable[service.IsInstanceUpgradeableResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_instance_health( + self, + ) -> Callable[ + [service.GetInstanceHealthRequest], + Union[ + service.GetInstanceHealthResponse, + Awaitable[service.GetInstanceHealthResponse], + ], + ]: + raise NotImplementedError() + + @property + def 
upgrade_instance( + self, + ) -> Callable[ + [service.UpgradeInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def rollback_instance( + self, + ) -> Callable[ + [service.RollbackInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def upgrade_instance_internal( + self, + ) -> Callable[ + [service.UpgradeInstanceInternalRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_environments( + self, + ) -> Callable[ + [service.ListEnvironmentsRequest], + Union[ + service.ListEnvironmentsResponse, + Awaitable[service.ListEnvironmentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_environment( + self, + ) -> Callable[ + [service.GetEnvironmentRequest], + Union[environment.Environment, Awaitable[environment.Environment]], + ]: + raise NotImplementedError() + + @property + def create_environment( + self, + ) -> Callable[ + [service.CreateEnvironmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_environment( + self, + ) -> Callable[ + [service.DeleteEnvironmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_schedules( + self, + ) -> Callable[ + [service.ListSchedulesRequest], + Union[service.ListSchedulesResponse, Awaitable[service.ListSchedulesResponse]], + ]: + raise NotImplementedError() + + @property + def get_schedule( + self, + ) -> Callable[ + [service.GetScheduleRequest], + Union[schedule.Schedule, Awaitable[schedule.Schedule]], + ]: + raise NotImplementedError() + + @property + def delete_schedule( + self, + ) -> Callable[ + [service.DeleteScheduleRequest], + Union[operations_pb2.Operation, 
Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_schedule( + self, + ) -> Callable[ + [service.CreateScheduleRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def trigger_schedule( + self, + ) -> Callable[ + [service.TriggerScheduleRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_executions( + self, + ) -> Callable[ + [service.ListExecutionsRequest], + Union[ + service.ListExecutionsResponse, Awaitable[service.ListExecutionsResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_execution( + self, + ) -> Callable[ + [service.GetExecutionRequest], + Union[execution.Execution, Awaitable[execution.Execution]], + ]: + raise NotImplementedError() + + @property + def delete_execution( + self, + ) -> Callable[ + [service.DeleteExecutionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_execution( + self, + ) -> Callable[ + [service.CreateExecutionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + +__all__ = ("NotebookServiceTransport",) diff --git a/google/cloud/notebooks_v1/services/notebook_service/transports/grpc.py b/google/cloud/notebooks_v1/services/notebook_service/transports/grpc.py new file mode 100644 index 0000000..6bc7f41 --- /dev/null +++ b/google/cloud/notebooks_v1/services/notebook_service/transports/grpc.py @@ -0,0 +1,1103 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.notebooks_v1.types import environment +from google.cloud.notebooks_v1.types import execution +from google.cloud.notebooks_v1.types import instance +from google.cloud.notebooks_v1.types import schedule +from google.cloud.notebooks_v1.types import service +from google.longrunning import operations_pb2 # type: ignore +from .base import NotebookServiceTransport, DEFAULT_CLIENT_INFO + + +class NotebookServiceGrpcTransport(NotebookServiceTransport): + """gRPC backend transport for NotebookService. + + API v1 service for Cloud AI Platform Notebooks. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "notebooks.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "notebooks.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Sanity check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def list_instances(
+        self,
+    ) -> Callable[[service.ListInstancesRequest], service.ListInstancesResponse]:
+        r"""Return a callable for the list instances method over gRPC.
+
+        Lists instances in a given project and location.
+ + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/ListInstances", + request_serializer=service.ListInstancesRequest.serialize, + response_deserializer=service.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance(self) -> Callable[[service.GetInstanceRequest], instance.Instance]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/GetInstance", + request_serializer=service.GetInstanceRequest.serialize, + response_deserializer=instance.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[[service.CreateInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the create instance method over gRPC. + + Creates a new Instance in a given project and + location. + + Returns: + Callable[[~.CreateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/CreateInstance", + request_serializer=service.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def register_instance( + self, + ) -> Callable[[service.RegisterInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the register instance method over gRPC. + + Registers an existing legacy notebook instance to the + Notebooks API server. Legacy instances are instances + created with the legacy Compute Engine calls. They are + not manageable by the Notebooks API out of the box. This + call makes these instances manageable by the Notebooks + API. + + Returns: + Callable[[~.RegisterInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "register_instance" not in self._stubs: + self._stubs["register_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/RegisterInstance", + request_serializer=service.RegisterInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["register_instance"] + + @property + def set_instance_accelerator( + self, + ) -> Callable[[service.SetInstanceAcceleratorRequest], operations_pb2.Operation]: + r"""Return a callable for the set instance accelerator method over gRPC. + + Updates the guest accelerators of a single Instance. 
+ + Returns: + Callable[[~.SetInstanceAcceleratorRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_instance_accelerator" not in self._stubs: + self._stubs["set_instance_accelerator"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/SetInstanceAccelerator", + request_serializer=service.SetInstanceAcceleratorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["set_instance_accelerator"] + + @property + def set_instance_machine_type( + self, + ) -> Callable[[service.SetInstanceMachineTypeRequest], operations_pb2.Operation]: + r"""Return a callable for the set instance machine type method over gRPC. + + Updates the machine type of a single Instance. + + Returns: + Callable[[~.SetInstanceMachineTypeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_instance_machine_type" not in self._stubs: + self._stubs["set_instance_machine_type"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/SetInstanceMachineType", + request_serializer=service.SetInstanceMachineTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["set_instance_machine_type"] + + @property + def update_instance_config( + self, + ) -> Callable[[service.UpdateInstanceConfigRequest], operations_pb2.Operation]: + r"""Return a callable for the update instance config method over gRPC. + + Update Notebook Instance configurations. 
+ + Returns: + Callable[[~.UpdateInstanceConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_instance_config" not in self._stubs: + self._stubs["update_instance_config"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/UpdateInstanceConfig", + request_serializer=service.UpdateInstanceConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_instance_config"] + + @property + def update_shielded_instance_config( + self, + ) -> Callable[ + [service.UpdateShieldedInstanceConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update shielded instance + config method over gRPC. + + Updates the Shielded instance configuration of a + single Instance. + + Returns: + Callable[[~.UpdateShieldedInstanceConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_shielded_instance_config" not in self._stubs: + self._stubs[ + "update_shielded_instance_config" + ] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/UpdateShieldedInstanceConfig", + request_serializer=service.UpdateShieldedInstanceConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_shielded_instance_config"] + + @property + def set_instance_labels( + self, + ) -> Callable[[service.SetInstanceLabelsRequest], operations_pb2.Operation]: + r"""Return a callable for the set instance labels method over gRPC. + + Replaces all the labels of an Instance. + + Returns: + Callable[[~.SetInstanceLabelsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_instance_labels" not in self._stubs: + self._stubs["set_instance_labels"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/SetInstanceLabels", + request_serializer=service.SetInstanceLabelsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["set_instance_labels"] + + @property + def delete_instance( + self, + ) -> Callable[[service.DeleteInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single Instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/DeleteInstance", + request_serializer=service.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def start_instance( + self, + ) -> Callable[[service.StartInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the start instance method over gRPC. + + Starts a notebook instance. + + Returns: + Callable[[~.StartInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_instance" not in self._stubs: + self._stubs["start_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/StartInstance", + request_serializer=service.StartInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["start_instance"] + + @property + def stop_instance( + self, + ) -> Callable[[service.StopInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the stop instance method over gRPC. + + Stops a notebook instance. + + Returns: + Callable[[~.StopInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "stop_instance" not in self._stubs: + self._stubs["stop_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/StopInstance", + request_serializer=service.StopInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["stop_instance"] + + @property + def reset_instance( + self, + ) -> Callable[[service.ResetInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the reset instance method over gRPC. + + Resets a notebook instance. + + Returns: + Callable[[~.ResetInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reset_instance" not in self._stubs: + self._stubs["reset_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/ResetInstance", + request_serializer=service.ResetInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reset_instance"] + + @property + def report_instance_info( + self, + ) -> Callable[[service.ReportInstanceInfoRequest], operations_pb2.Operation]: + r"""Return a callable for the report instance info method over gRPC. + + Allows notebook instances to + report their latest instance information to the + Notebooks API server. The server will merge the reported + information to the instance metadata store. Do not use + this method directly. + + Returns: + Callable[[~.ReportInstanceInfoRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "report_instance_info" not in self._stubs: + self._stubs["report_instance_info"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/ReportInstanceInfo", + request_serializer=service.ReportInstanceInfoRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["report_instance_info"] + + @property + def is_instance_upgradeable( + self, + ) -> Callable[ + [service.IsInstanceUpgradeableRequest], service.IsInstanceUpgradeableResponse + ]: + r"""Return a callable for the is instance upgradeable method over gRPC. + + Check if a notebook instance is upgradable. + + Returns: + Callable[[~.IsInstanceUpgradeableRequest], + ~.IsInstanceUpgradeableResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "is_instance_upgradeable" not in self._stubs: + self._stubs["is_instance_upgradeable"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/IsInstanceUpgradeable", + request_serializer=service.IsInstanceUpgradeableRequest.serialize, + response_deserializer=service.IsInstanceUpgradeableResponse.deserialize, + ) + return self._stubs["is_instance_upgradeable"] + + @property + def get_instance_health( + self, + ) -> Callable[ + [service.GetInstanceHealthRequest], service.GetInstanceHealthResponse + ]: + r"""Return a callable for the get instance health method over gRPC. + + Check if a notebook instance is healthy. + + Returns: + Callable[[~.GetInstanceHealthRequest], + ~.GetInstanceHealthResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance_health" not in self._stubs: + self._stubs["get_instance_health"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/GetInstanceHealth", + request_serializer=service.GetInstanceHealthRequest.serialize, + response_deserializer=service.GetInstanceHealthResponse.deserialize, + ) + return self._stubs["get_instance_health"] + + @property + def upgrade_instance( + self, + ) -> Callable[[service.UpgradeInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the upgrade instance method over gRPC. + + Upgrades a notebook instance to the latest version. + + Returns: + Callable[[~.UpgradeInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "upgrade_instance" not in self._stubs: + self._stubs["upgrade_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/UpgradeInstance", + request_serializer=service.UpgradeInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["upgrade_instance"] + + @property + def rollback_instance( + self, + ) -> Callable[[service.RollbackInstanceRequest], operations_pb2.Operation]: + r"""Return a callable for the rollback instance method over gRPC. + + Rollbacks a notebook instance to the previous + version. + + Returns: + Callable[[~.RollbackInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback_instance" not in self._stubs: + self._stubs["rollback_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/RollbackInstance", + request_serializer=service.RollbackInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["rollback_instance"] + + @property + def upgrade_instance_internal( + self, + ) -> Callable[[service.UpgradeInstanceInternalRequest], operations_pb2.Operation]: + r"""Return a callable for the upgrade instance internal method over gRPC. + + Allows notebook instances to + call this endpoint to upgrade themselves. Do not use + this method directly. + + Returns: + Callable[[~.UpgradeInstanceInternalRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "upgrade_instance_internal" not in self._stubs: + self._stubs["upgrade_instance_internal"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/UpgradeInstanceInternal", + request_serializer=service.UpgradeInstanceInternalRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["upgrade_instance_internal"] + + @property + def list_environments( + self, + ) -> Callable[[service.ListEnvironmentsRequest], service.ListEnvironmentsResponse]: + r"""Return a callable for the list environments method over gRPC. + + Lists environments in a project. + + Returns: + Callable[[~.ListEnvironmentsRequest], + ~.ListEnvironmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_environments" not in self._stubs: + self._stubs["list_environments"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/ListEnvironments", + request_serializer=service.ListEnvironmentsRequest.serialize, + response_deserializer=service.ListEnvironmentsResponse.deserialize, + ) + return self._stubs["list_environments"] + + @property + def get_environment( + self, + ) -> Callable[[service.GetEnvironmentRequest], environment.Environment]: + r"""Return a callable for the get environment method over gRPC. + + Gets details of a single Environment. + + Returns: + Callable[[~.GetEnvironmentRequest], + ~.Environment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_environment" not in self._stubs: + self._stubs["get_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/GetEnvironment", + request_serializer=service.GetEnvironmentRequest.serialize, + response_deserializer=environment.Environment.deserialize, + ) + return self._stubs["get_environment"] + + @property + def create_environment( + self, + ) -> Callable[[service.CreateEnvironmentRequest], operations_pb2.Operation]: + r"""Return a callable for the create environment method over gRPC. + + Creates a new Environment. + + Returns: + Callable[[~.CreateEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_environment" not in self._stubs: + self._stubs["create_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/CreateEnvironment", + request_serializer=service.CreateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_environment"] + + @property + def delete_environment( + self, + ) -> Callable[[service.DeleteEnvironmentRequest], operations_pb2.Operation]: + r"""Return a callable for the delete environment method over gRPC. + + Deletes a single Environment. + + Returns: + Callable[[~.DeleteEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_environment" not in self._stubs: + self._stubs["delete_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/DeleteEnvironment", + request_serializer=service.DeleteEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_environment"] + + @property + def list_schedules( + self, + ) -> Callable[[service.ListSchedulesRequest], service.ListSchedulesResponse]: + r"""Return a callable for the list schedules method over gRPC. + + Lists schedules in a given project and location. + + Returns: + Callable[[~.ListSchedulesRequest], + ~.ListSchedulesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_schedules" not in self._stubs: + self._stubs["list_schedules"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/ListSchedules", + request_serializer=service.ListSchedulesRequest.serialize, + response_deserializer=service.ListSchedulesResponse.deserialize, + ) + return self._stubs["list_schedules"] + + @property + def get_schedule(self) -> Callable[[service.GetScheduleRequest], schedule.Schedule]: + r"""Return a callable for the get schedule method over gRPC. + + Gets details of schedule + + Returns: + Callable[[~.GetScheduleRequest], + ~.Schedule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_schedule" not in self._stubs: + self._stubs["get_schedule"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/GetSchedule", + request_serializer=service.GetScheduleRequest.serialize, + response_deserializer=schedule.Schedule.deserialize, + ) + return self._stubs["get_schedule"] + + @property + def delete_schedule( + self, + ) -> Callable[[service.DeleteScheduleRequest], operations_pb2.Operation]: + r"""Return a callable for the delete schedule method over gRPC. + + Deletes schedule and all underlying jobs + + Returns: + Callable[[~.DeleteScheduleRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_schedule" not in self._stubs: + self._stubs["delete_schedule"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/DeleteSchedule", + request_serializer=service.DeleteScheduleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_schedule"] + + @property + def create_schedule( + self, + ) -> Callable[[service.CreateScheduleRequest], operations_pb2.Operation]: + r"""Return a callable for the create schedule method over gRPC. + + Creates a new Scheduled Notebook in a given project + and location. + + Returns: + Callable[[~.CreateScheduleRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_schedule" not in self._stubs: + self._stubs["create_schedule"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/CreateSchedule", + request_serializer=service.CreateScheduleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_schedule"] + + @property + def trigger_schedule( + self, + ) -> Callable[[service.TriggerScheduleRequest], operations_pb2.Operation]: + r"""Return a callable for the trigger schedule method over gRPC. + + Triggers execution of an existing schedule. + + Returns: + Callable[[~.TriggerScheduleRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "trigger_schedule" not in self._stubs: + self._stubs["trigger_schedule"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/TriggerSchedule", + request_serializer=service.TriggerScheduleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["trigger_schedule"] + + @property + def list_executions( + self, + ) -> Callable[[service.ListExecutionsRequest], service.ListExecutionsResponse]: + r"""Return a callable for the list executions method over gRPC. + + Lists executions in a given project and location + + Returns: + Callable[[~.ListExecutionsRequest], + ~.ListExecutionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_executions" not in self._stubs: + self._stubs["list_executions"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/ListExecutions", + request_serializer=service.ListExecutionsRequest.serialize, + response_deserializer=service.ListExecutionsResponse.deserialize, + ) + return self._stubs["list_executions"] + + @property + def get_execution( + self, + ) -> Callable[[service.GetExecutionRequest], execution.Execution]: + r"""Return a callable for the get execution method over gRPC. + + Gets details of executions + + Returns: + Callable[[~.GetExecutionRequest], + ~.Execution]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_execution" not in self._stubs: + self._stubs["get_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/GetExecution", + request_serializer=service.GetExecutionRequest.serialize, + response_deserializer=execution.Execution.deserialize, + ) + return self._stubs["get_execution"] + + @property + def delete_execution( + self, + ) -> Callable[[service.DeleteExecutionRequest], operations_pb2.Operation]: + r"""Return a callable for the delete execution method over gRPC. + + Deletes execution + + Returns: + Callable[[~.DeleteExecutionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_execution" not in self._stubs: + self._stubs["delete_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/DeleteExecution", + request_serializer=service.DeleteExecutionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_execution"] + + @property + def create_execution( + self, + ) -> Callable[[service.CreateExecutionRequest], operations_pb2.Operation]: + r"""Return a callable for the create execution method over gRPC. + + Creates a new Scheduled Notebook in a given project + and location. + + Returns: + Callable[[~.CreateExecutionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_execution" not in self._stubs: + self._stubs["create_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/CreateExecution", + request_serializer=service.CreateExecutionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_execution"] + + +__all__ = ("NotebookServiceGrpcTransport",) diff --git a/google/cloud/notebooks_v1/services/notebook_service/transports/grpc_asyncio.py b/google/cloud/notebooks_v1/services/notebook_service/transports/grpc_asyncio.py new file mode 100644 index 0000000..64c472d --- /dev/null +++ b/google/cloud/notebooks_v1/services/notebook_service/transports/grpc_asyncio.py @@ -0,0 +1,1150 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.notebooks_v1.types import environment +from google.cloud.notebooks_v1.types import execution +from google.cloud.notebooks_v1.types import instance +from google.cloud.notebooks_v1.types import schedule +from google.cloud.notebooks_v1.types import service +from google.longrunning import operations_pb2 # type: ignore +from .base import NotebookServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import NotebookServiceGrpcTransport + + +class NotebookServiceGrpcAsyncIOTransport(NotebookServiceTransport): + """gRPC AsyncIO backend transport for NotebookService. + + API v1 service for Cloud AI Platform Notebooks. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "notebooks.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "notebooks.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. 
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances( + self, + ) -> Callable[ + [service.ListInstancesRequest], Awaitable[service.ListInstancesResponse] + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists instances in a given project and location. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/ListInstances", + request_serializer=service.ListInstancesRequest.serialize, + response_deserializer=service.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[[service.GetInstanceRequest], Awaitable[instance.Instance]]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/GetInstance", + request_serializer=service.GetInstanceRequest.serialize, + response_deserializer=instance.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[[service.CreateInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create instance method over gRPC. + + Creates a new Instance in a given project and + location. + + Returns: + Callable[[~.CreateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/CreateInstance", + request_serializer=service.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def register_instance( + self, + ) -> Callable[ + [service.RegisterInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the register instance method over gRPC. + + Registers an existing legacy notebook instance to the + Notebooks API server. Legacy instances are instances + created with the legacy Compute Engine calls. They are + not manageable by the Notebooks API out of the box. This + call makes these instances manageable by the Notebooks + API. + + Returns: + Callable[[~.RegisterInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "register_instance" not in self._stubs: + self._stubs["register_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/RegisterInstance", + request_serializer=service.RegisterInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["register_instance"] + + @property + def set_instance_accelerator( + self, + ) -> Callable[ + [service.SetInstanceAcceleratorRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the set instance accelerator method over gRPC. + + Updates the guest accelerators of a single Instance. + + Returns: + Callable[[~.SetInstanceAcceleratorRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_instance_accelerator" not in self._stubs: + self._stubs["set_instance_accelerator"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/SetInstanceAccelerator", + request_serializer=service.SetInstanceAcceleratorRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["set_instance_accelerator"] + + @property + def set_instance_machine_type( + self, + ) -> Callable[ + [service.SetInstanceMachineTypeRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the set instance machine type method over gRPC. + + Updates the machine type of a single Instance. 
+ + Returns: + Callable[[~.SetInstanceMachineTypeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_instance_machine_type" not in self._stubs: + self._stubs["set_instance_machine_type"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/SetInstanceMachineType", + request_serializer=service.SetInstanceMachineTypeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["set_instance_machine_type"] + + @property + def update_instance_config( + self, + ) -> Callable[ + [service.UpdateInstanceConfigRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update instance config method over gRPC. + + Update Notebook Instance configurations. + + Returns: + Callable[[~.UpdateInstanceConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_instance_config" not in self._stubs: + self._stubs["update_instance_config"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/UpdateInstanceConfig", + request_serializer=service.UpdateInstanceConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_instance_config"] + + @property + def update_shielded_instance_config( + self, + ) -> Callable[ + [service.UpdateShieldedInstanceConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update shielded instance + config method over gRPC. + + Updates the Shielded instance configuration of a + single Instance. + + Returns: + Callable[[~.UpdateShieldedInstanceConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_shielded_instance_config" not in self._stubs: + self._stubs[ + "update_shielded_instance_config" + ] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/UpdateShieldedInstanceConfig", + request_serializer=service.UpdateShieldedInstanceConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_shielded_instance_config"] + + @property + def set_instance_labels( + self, + ) -> Callable[ + [service.SetInstanceLabelsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the set instance labels method over gRPC. + + Replaces all the labels of an Instance. + + Returns: + Callable[[~.SetInstanceLabelsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_instance_labels" not in self._stubs: + self._stubs["set_instance_labels"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/SetInstanceLabels", + request_serializer=service.SetInstanceLabelsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["set_instance_labels"] + + @property + def delete_instance( + self, + ) -> Callable[[service.DeleteInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single Instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/DeleteInstance", + request_serializer=service.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def start_instance( + self, + ) -> Callable[[service.StartInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the start instance method over gRPC. + + Starts a notebook instance. + + Returns: + Callable[[~.StartInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "start_instance" not in self._stubs: + self._stubs["start_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/StartInstance", + request_serializer=service.StartInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["start_instance"] + + @property + def stop_instance( + self, + ) -> Callable[[service.StopInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the stop instance method over gRPC. + + Stops a notebook instance. + + Returns: + Callable[[~.StopInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stop_instance" not in self._stubs: + self._stubs["stop_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/StopInstance", + request_serializer=service.StopInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["stop_instance"] + + @property + def reset_instance( + self, + ) -> Callable[[service.ResetInstanceRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the reset instance method over gRPC. + + Resets a notebook instance. + + Returns: + Callable[[~.ResetInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "reset_instance" not in self._stubs: + self._stubs["reset_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/ResetInstance", + request_serializer=service.ResetInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["reset_instance"] + + @property + def report_instance_info( + self, + ) -> Callable[ + [service.ReportInstanceInfoRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the report instance info method over gRPC. + + Allows notebook instances to + report their latest instance information to the + Notebooks API server. The server will merge the reported + information to the instance metadata store. Do not use + this method directly. + + Returns: + Callable[[~.ReportInstanceInfoRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "report_instance_info" not in self._stubs: + self._stubs["report_instance_info"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/ReportInstanceInfo", + request_serializer=service.ReportInstanceInfoRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["report_instance_info"] + + @property + def is_instance_upgradeable( + self, + ) -> Callable[ + [service.IsInstanceUpgradeableRequest], + Awaitable[service.IsInstanceUpgradeableResponse], + ]: + r"""Return a callable for the is instance upgradeable method over gRPC. + + Check if a notebook instance is upgradable. + + Returns: + Callable[[~.IsInstanceUpgradeableRequest], + Awaitable[~.IsInstanceUpgradeableResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "is_instance_upgradeable" not in self._stubs: + self._stubs["is_instance_upgradeable"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/IsInstanceUpgradeable", + request_serializer=service.IsInstanceUpgradeableRequest.serialize, + response_deserializer=service.IsInstanceUpgradeableResponse.deserialize, + ) + return self._stubs["is_instance_upgradeable"] + + @property + def get_instance_health( + self, + ) -> Callable[ + [service.GetInstanceHealthRequest], Awaitable[service.GetInstanceHealthResponse] + ]: + r"""Return a callable for the get instance health method over gRPC. + + Check if a notebook instance is healthy. + + Returns: + Callable[[~.GetInstanceHealthRequest], + Awaitable[~.GetInstanceHealthResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance_health" not in self._stubs: + self._stubs["get_instance_health"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/GetInstanceHealth", + request_serializer=service.GetInstanceHealthRequest.serialize, + response_deserializer=service.GetInstanceHealthResponse.deserialize, + ) + return self._stubs["get_instance_health"] + + @property + def upgrade_instance( + self, + ) -> Callable[ + [service.UpgradeInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the upgrade instance method over gRPC. + + Upgrades a notebook instance to the latest version. 
+ + Returns: + Callable[[~.UpgradeInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "upgrade_instance" not in self._stubs: + self._stubs["upgrade_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/UpgradeInstance", + request_serializer=service.UpgradeInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["upgrade_instance"] + + @property + def rollback_instance( + self, + ) -> Callable[ + [service.RollbackInstanceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the rollback instance method over gRPC. + + Rollbacks a notebook instance to the previous + version. + + Returns: + Callable[[~.RollbackInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback_instance" not in self._stubs: + self._stubs["rollback_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/RollbackInstance", + request_serializer=service.RollbackInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["rollback_instance"] + + @property + def upgrade_instance_internal( + self, + ) -> Callable[ + [service.UpgradeInstanceInternalRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the upgrade instance internal method over gRPC. + + Allows notebook instances to + call this endpoint to upgrade themselves. 
Do not use + this method directly. + + Returns: + Callable[[~.UpgradeInstanceInternalRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "upgrade_instance_internal" not in self._stubs: + self._stubs["upgrade_instance_internal"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/UpgradeInstanceInternal", + request_serializer=service.UpgradeInstanceInternalRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["upgrade_instance_internal"] + + @property + def list_environments( + self, + ) -> Callable[ + [service.ListEnvironmentsRequest], Awaitable[service.ListEnvironmentsResponse] + ]: + r"""Return a callable for the list environments method over gRPC. + + Lists environments in a project. + + Returns: + Callable[[~.ListEnvironmentsRequest], + Awaitable[~.ListEnvironmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_environments" not in self._stubs: + self._stubs["list_environments"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/ListEnvironments", + request_serializer=service.ListEnvironmentsRequest.serialize, + response_deserializer=service.ListEnvironmentsResponse.deserialize, + ) + return self._stubs["list_environments"] + + @property + def get_environment( + self, + ) -> Callable[[service.GetEnvironmentRequest], Awaitable[environment.Environment]]: + r"""Return a callable for the get environment method over gRPC. 
+ + Gets details of a single Environment. + + Returns: + Callable[[~.GetEnvironmentRequest], + Awaitable[~.Environment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_environment" not in self._stubs: + self._stubs["get_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/GetEnvironment", + request_serializer=service.GetEnvironmentRequest.serialize, + response_deserializer=environment.Environment.deserialize, + ) + return self._stubs["get_environment"] + + @property + def create_environment( + self, + ) -> Callable[ + [service.CreateEnvironmentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create environment method over gRPC. + + Creates a new Environment. + + Returns: + Callable[[~.CreateEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_environment" not in self._stubs: + self._stubs["create_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/CreateEnvironment", + request_serializer=service.CreateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_environment"] + + @property + def delete_environment( + self, + ) -> Callable[ + [service.DeleteEnvironmentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete environment method over gRPC. + + Deletes a single Environment. 
+ + Returns: + Callable[[~.DeleteEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_environment" not in self._stubs: + self._stubs["delete_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/DeleteEnvironment", + request_serializer=service.DeleteEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_environment"] + + @property + def list_schedules( + self, + ) -> Callable[ + [service.ListSchedulesRequest], Awaitable[service.ListSchedulesResponse] + ]: + r"""Return a callable for the list schedules method over gRPC. + + Lists schedules in a given project and location. + + Returns: + Callable[[~.ListSchedulesRequest], + Awaitable[~.ListSchedulesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_schedules" not in self._stubs: + self._stubs["list_schedules"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/ListSchedules", + request_serializer=service.ListSchedulesRequest.serialize, + response_deserializer=service.ListSchedulesResponse.deserialize, + ) + return self._stubs["list_schedules"] + + @property + def get_schedule( + self, + ) -> Callable[[service.GetScheduleRequest], Awaitable[schedule.Schedule]]: + r"""Return a callable for the get schedule method over gRPC. 
+ + Gets details of schedule + + Returns: + Callable[[~.GetScheduleRequest], + Awaitable[~.Schedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_schedule" not in self._stubs: + self._stubs["get_schedule"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/GetSchedule", + request_serializer=service.GetScheduleRequest.serialize, + response_deserializer=schedule.Schedule.deserialize, + ) + return self._stubs["get_schedule"] + + @property + def delete_schedule( + self, + ) -> Callable[[service.DeleteScheduleRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete schedule method over gRPC. + + Deletes schedule and all underlying jobs + + Returns: + Callable[[~.DeleteScheduleRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_schedule" not in self._stubs: + self._stubs["delete_schedule"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/DeleteSchedule", + request_serializer=service.DeleteScheduleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_schedule"] + + @property + def create_schedule( + self, + ) -> Callable[[service.CreateScheduleRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create schedule method over gRPC. + + Creates a new Scheduled Notebook in a given project + and location. 
+ + Returns: + Callable[[~.CreateScheduleRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_schedule" not in self._stubs: + self._stubs["create_schedule"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/CreateSchedule", + request_serializer=service.CreateScheduleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_schedule"] + + @property + def trigger_schedule( + self, + ) -> Callable[ + [service.TriggerScheduleRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the trigger schedule method over gRPC. + + Triggers execution of an existing schedule. + + Returns: + Callable[[~.TriggerScheduleRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "trigger_schedule" not in self._stubs: + self._stubs["trigger_schedule"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/TriggerSchedule", + request_serializer=service.TriggerScheduleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["trigger_schedule"] + + @property + def list_executions( + self, + ) -> Callable[ + [service.ListExecutionsRequest], Awaitable[service.ListExecutionsResponse] + ]: + r"""Return a callable for the list executions method over gRPC. 
+ + Lists executions in a given project and location + + Returns: + Callable[[~.ListExecutionsRequest], + Awaitable[~.ListExecutionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_executions" not in self._stubs: + self._stubs["list_executions"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/ListExecutions", + request_serializer=service.ListExecutionsRequest.serialize, + response_deserializer=service.ListExecutionsResponse.deserialize, + ) + return self._stubs["list_executions"] + + @property + def get_execution( + self, + ) -> Callable[[service.GetExecutionRequest], Awaitable[execution.Execution]]: + r"""Return a callable for the get execution method over gRPC. + + Gets details of executions + + Returns: + Callable[[~.GetExecutionRequest], + Awaitable[~.Execution]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_execution" not in self._stubs: + self._stubs["get_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/GetExecution", + request_serializer=service.GetExecutionRequest.serialize, + response_deserializer=execution.Execution.deserialize, + ) + return self._stubs["get_execution"] + + @property + def delete_execution( + self, + ) -> Callable[ + [service.DeleteExecutionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete execution method over gRPC. 
+ + Deletes execution + + Returns: + Callable[[~.DeleteExecutionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_execution" not in self._stubs: + self._stubs["delete_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/DeleteExecution", + request_serializer=service.DeleteExecutionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_execution"] + + @property + def create_execution( + self, + ) -> Callable[ + [service.CreateExecutionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create execution method over gRPC. + + Creates a new Scheduled Notebook in a given project + and location. + + Returns: + Callable[[~.CreateExecutionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_execution" not in self._stubs: + self._stubs["create_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.notebooks.v1.NotebookService/CreateExecution", + request_serializer=service.CreateExecutionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_execution"] + + +__all__ = ("NotebookServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/notebooks_v1/types/__init__.py b/google/cloud/notebooks_v1/types/__init__.py new file mode 100644 index 0000000..e4a504b --- /dev/null +++ b/google/cloud/notebooks_v1/types/__init__.py @@ -0,0 +1,170 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .environment import ( + ContainerImage, + Environment, + VmImage, +) +from .event import Event +from .execution import ( + Execution, + ExecutionTemplate, +) +from .instance import ( + Instance, + ReservationAffinity, +) +from .instance_config import InstanceConfig +from .managed_service import ( + CreateRuntimeRequest, + DeleteRuntimeRequest, + GetRuntimeRequest, + ListRuntimesRequest, + ListRuntimesResponse, + ReportRuntimeEventRequest, + ResetRuntimeRequest, + StartRuntimeRequest, + StopRuntimeRequest, + SwitchRuntimeRequest, +) +from .runtime import ( + EncryptionConfig, + LocalDisk, + LocalDiskInitializeParams, + Runtime, + RuntimeAcceleratorConfig, + RuntimeAccessConfig, + RuntimeMetrics, + RuntimeShieldedInstanceConfig, + RuntimeSoftwareConfig, + VirtualMachine, + VirtualMachineConfig, +) +from .schedule import Schedule +from .service import ( + CreateEnvironmentRequest, + CreateExecutionRequest, + CreateInstanceRequest, + CreateScheduleRequest, + DeleteEnvironmentRequest, + DeleteExecutionRequest, + DeleteInstanceRequest, + DeleteScheduleRequest, + GetEnvironmentRequest, + GetExecutionRequest, + GetInstanceHealthRequest, + GetInstanceHealthResponse, + GetInstanceRequest, + GetScheduleRequest, + IsInstanceUpgradeableRequest, + IsInstanceUpgradeableResponse, + ListEnvironmentsRequest, + ListEnvironmentsResponse, + ListExecutionsRequest, + ListExecutionsResponse, + ListInstancesRequest, + ListInstancesResponse, + ListSchedulesRequest, + ListSchedulesResponse, + OperationMetadata, + RegisterInstanceRequest, + ReportInstanceInfoRequest, + ResetInstanceRequest, + RollbackInstanceRequest, + SetInstanceAcceleratorRequest, + SetInstanceLabelsRequest, + SetInstanceMachineTypeRequest, + StartInstanceRequest, + StopInstanceRequest, + TriggerScheduleRequest, + UpdateInstanceConfigRequest, + UpdateShieldedInstanceConfigRequest, + UpgradeInstanceInternalRequest, + UpgradeInstanceRequest, +) + +__all__ = ( + "ContainerImage", + "Environment", + "VmImage", + 
"Event", + "Execution", + "ExecutionTemplate", + "Instance", + "ReservationAffinity", + "InstanceConfig", + "CreateRuntimeRequest", + "DeleteRuntimeRequest", + "GetRuntimeRequest", + "ListRuntimesRequest", + "ListRuntimesResponse", + "ReportRuntimeEventRequest", + "ResetRuntimeRequest", + "StartRuntimeRequest", + "StopRuntimeRequest", + "SwitchRuntimeRequest", + "EncryptionConfig", + "LocalDisk", + "LocalDiskInitializeParams", + "Runtime", + "RuntimeAcceleratorConfig", + "RuntimeAccessConfig", + "RuntimeMetrics", + "RuntimeShieldedInstanceConfig", + "RuntimeSoftwareConfig", + "VirtualMachine", + "VirtualMachineConfig", + "Schedule", + "CreateEnvironmentRequest", + "CreateExecutionRequest", + "CreateInstanceRequest", + "CreateScheduleRequest", + "DeleteEnvironmentRequest", + "DeleteExecutionRequest", + "DeleteInstanceRequest", + "DeleteScheduleRequest", + "GetEnvironmentRequest", + "GetExecutionRequest", + "GetInstanceHealthRequest", + "GetInstanceHealthResponse", + "GetInstanceRequest", + "GetScheduleRequest", + "IsInstanceUpgradeableRequest", + "IsInstanceUpgradeableResponse", + "ListEnvironmentsRequest", + "ListEnvironmentsResponse", + "ListExecutionsRequest", + "ListExecutionsResponse", + "ListInstancesRequest", + "ListInstancesResponse", + "ListSchedulesRequest", + "ListSchedulesResponse", + "OperationMetadata", + "RegisterInstanceRequest", + "ReportInstanceInfoRequest", + "ResetInstanceRequest", + "RollbackInstanceRequest", + "SetInstanceAcceleratorRequest", + "SetInstanceLabelsRequest", + "SetInstanceMachineTypeRequest", + "StartInstanceRequest", + "StopInstanceRequest", + "TriggerScheduleRequest", + "UpdateInstanceConfigRequest", + "UpdateShieldedInstanceConfigRequest", + "UpgradeInstanceInternalRequest", + "UpgradeInstanceRequest", +) diff --git a/google/cloud/notebooks_v1/types/environment.py b/google/cloud/notebooks_v1/types/environment.py new file mode 100644 index 0000000..6243552 --- /dev/null +++ b/google/cloud/notebooks_v1/types/environment.py @@ 
-0,0 +1,106 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.notebooks.v1", + manifest={"Environment", "VmImage", "ContainerImage",}, +) + + +class Environment(proto.Message): + r"""Definition of a software environment that is used to start a + notebook instance. + + Attributes: + name (str): + Output only. Name of this environment. Format: + ``projects/{project_id}/locations/{location}/environments/{environment_id}`` + display_name (str): + Display name of this environment for the UI. + description (str): + A brief description of this environment. + vm_image (google.cloud.notebooks_v1.types.VmImage): + Use a Compute Engine VM image to start the + notebook instance. + container_image (google.cloud.notebooks_v1.types.ContainerImage): + Use a container image to start the notebook + instance. + post_startup_script (str): + Path to a Bash script that automatically runs after a + notebook instance fully boots up. The path must be a URL or + Cloud Storage path. Example: + ``"gs://path-to-file/file-name"`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which this + environment was created. 
+ """ + + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) + vm_image = proto.Field( + proto.MESSAGE, number=6, oneof="image_type", message="VmImage", + ) + container_image = proto.Field( + proto.MESSAGE, number=7, oneof="image_type", message="ContainerImage", + ) + post_startup_script = proto.Field(proto.STRING, number=8,) + create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) + + +class VmImage(proto.Message): + r"""Definition of a custom Compute Engine virtual machine image + for starting a notebook instance with the environment installed + directly on the VM. + + Attributes: + project (str): + Required. The name of the Google Cloud project that this VM + image belongs to. Format: ``projects/{project_id}`` + image_name (str): + Use VM image name to find the image. + image_family (str): + Use this VM image family to find the image; + the newest image in this family will be used. + """ + + project = proto.Field(proto.STRING, number=1,) + image_name = proto.Field(proto.STRING, number=2, oneof="image",) + image_family = proto.Field(proto.STRING, number=3, oneof="image",) + + +class ContainerImage(proto.Message): + r"""Definition of a container image for starting a notebook + instance with the environment installed in a container. + + Attributes: + repository (str): + Required. The path to the container image repository. For + example: ``gcr.io/{project_id}/{image_name}`` + tag (str): + The tag of the container image. If not + specified, this defaults to the latest tag. 
+ """ + + repository = proto.Field(proto.STRING, number=1,) + tag = proto.Field(proto.STRING, number=2,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/notebooks_v1/types/event.py b/google/cloud/notebooks_v1/types/event.py new file mode 100644 index 0000000..36bca24 --- /dev/null +++ b/google/cloud/notebooks_v1/types/event.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module(package="google.cloud.notebooks.v1", manifest={"Event",},) + + +class Event(proto.Message): + r"""The definition of an Event for a managed / semi-managed + notebook instance. + + Attributes: + report_time (google.protobuf.timestamp_pb2.Timestamp): + Event report time. + type_ (google.cloud.notebooks_v1.types.Event.EventType): + Event type. 
+ """ + + class EventType(proto.Enum): + r"""The definition of the even types.""" + EVENT_TYPE_UNSPECIFIED = 0 + IDLE = 1 + + report_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + type_ = proto.Field(proto.ENUM, number=2, enum=EventType,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/notebooks_v1/types/execution.py b/google/cloud/notebooks_v1/types/execution.py new file mode 100644 index 0000000..1bb8d40 --- /dev/null +++ b/google/cloud/notebooks_v1/types/execution.py @@ -0,0 +1,260 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.notebooks.v1", manifest={"ExecutionTemplate", "Execution",}, +) + + +class ExecutionTemplate(proto.Message): + r"""The description a notebook execution workload. + Attributes: + scale_tier (google.cloud.notebooks_v1.types.ExecutionTemplate.ScaleTier): + Required. Scale tier of the hardware used for + notebook execution. DEPRECATED Will be + discontinued. As right now only CUSTOM is + supported. + master_type (str): + Specifies the type of virtual machine to use for your + training job's master worker. You must specify this field + when ``scaleTier`` is set to ``CUSTOM``. + + You can use certain Compute Engine machine types directly in + this field. 
The following types are supported: + + - ``n1-standard-4`` + - ``n1-standard-8`` + - ``n1-standard-16`` + - ``n1-standard-32`` + - ``n1-standard-64`` + - ``n1-standard-96`` + - ``n1-highmem-2`` + - ``n1-highmem-4`` + - ``n1-highmem-8`` + - ``n1-highmem-16`` + - ``n1-highmem-32`` + - ``n1-highmem-64`` + - ``n1-highmem-96`` + - ``n1-highcpu-16`` + - ``n1-highcpu-32`` + - ``n1-highcpu-64`` + - ``n1-highcpu-96`` + + Alternatively, you can use the following legacy machine + types: + + - ``standard`` + - ``large_model`` + - ``complex_model_s`` + - ``complex_model_m`` + - ``complex_model_l`` + - ``standard_gpu`` + - ``complex_model_m_gpu`` + - ``complex_model_l_gpu`` + - ``standard_p100`` + - ``complex_model_m_p100`` + - ``standard_v100`` + - ``large_model_v100`` + - ``complex_model_m_v100`` + - ``complex_model_l_v100`` + + Finally, if you want to use a TPU for training, specify + ``cloud_tpu`` in this field. Learn more about the [special + configuration options for training with TPU. + accelerator_config (google.cloud.notebooks_v1.types.ExecutionTemplate.SchedulerAcceleratorConfig): + Configuration (count and accelerator type) + for hardware running notebook execution. + labels (Sequence[google.cloud.notebooks_v1.types.ExecutionTemplate.LabelsEntry]): + Labels for execution. + If execution is scheduled, a field included will + be 'nbs-scheduled'. Otherwise, it is an + immediate execution, and an included field will + be 'nbs-immediate'. Use fields to efficiently + index between various types of executions. + input_notebook_file (str): + Path to the notebook file to execute. Must be in a Google + Cloud Storage bucket. 
Format: + gs://{project_id}/{folder}/{notebook_file_name} Ex: + gs://notebook_user/scheduled_notebooks/sentiment_notebook.ipynb + container_image_uri (str): + Container Image URI to a DLVM + Example: 'gcr.io/deeplearning-platform- + release/base-cu100' More examples can be found + at: + https://cloud.google.com/ai-platform/deep- + learning-containers/docs/choosing-container + output_notebook_folder (str): + Path to the notebook folder to write to. Must be in a Google + Cloud Storage bucket path. Format: + gs://{project_id}/{folder} Ex: + gs://notebook_user/scheduled_notebooks + params_yaml_file (str): + Parameters to be overridden in the notebook during + execution. Ref + https://papermill.readthedocs.io/en/latest/usage-parameterize.html + on how to specifying parameters in the input notebook and + pass them here in an YAML file. Ex: + gs://notebook_user/scheduled_notebooks/sentiment_notebook_params.yaml + parameters (str): + Parameters used within the 'input_notebook_file' notebook. + service_account (str): + The email address of a service account to use when running + the execution. You must have the + ``iam.serviceAccounts.actAs`` permission for the specified + service account. + job_type (google.cloud.notebooks_v1.types.ExecutionTemplate.JobType): + The type of Job to be used on this execution. + dataproc_parameters (google.cloud.notebooks_v1.types.ExecutionTemplate.DataprocParameters): + Parameters used in Dataproc JobType + executions. + """ + + class ScaleTier(proto.Enum): + r"""Required. Specifies the machine types, the number of replicas + for workers and parameter servers. 
+ """ + SCALE_TIER_UNSPECIFIED = 0 + BASIC = 1 + STANDARD_1 = 2 + PREMIUM_1 = 3 + BASIC_GPU = 4 + BASIC_TPU = 5 + CUSTOM = 6 + + class SchedulerAcceleratorType(proto.Enum): + r"""Hardware accelerator types for AI Platform Training jobs.""" + SCHEDULER_ACCELERATOR_TYPE_UNSPECIFIED = 0 + NVIDIA_TESLA_K80 = 1 + NVIDIA_TESLA_P100 = 2 + NVIDIA_TESLA_V100 = 3 + NVIDIA_TESLA_P4 = 4 + NVIDIA_TESLA_T4 = 5 + TPU_V2 = 6 + TPU_V3 = 7 + + class JobType(proto.Enum): + r"""The backend used for this execution.""" + JOB_TYPE_UNSPECIFIED = 0 + VERTEX_AI = 1 + DATAPROC = 2 + + class SchedulerAcceleratorConfig(proto.Message): + r"""Definition of a hardware accelerator. Note that not all combinations + of ``type`` and ``core_count`` are valid. Check GPUs on Compute + Engine to find a valid combination. TPUs are not supported. + + Attributes: + type_ (google.cloud.notebooks_v1.types.ExecutionTemplate.SchedulerAcceleratorType): + Type of this accelerator. + core_count (int): + Count of cores of this accelerator. + """ + + type_ = proto.Field( + proto.ENUM, number=1, enum="ExecutionTemplate.SchedulerAcceleratorType", + ) + core_count = proto.Field(proto.INT64, number=2,) + + class DataprocParameters(proto.Message): + r"""Parameters used in Dataproc JobType executions. + Attributes: + cluster (str): + URI for cluster used to run Dataproc execution. 
Format: + 'projects/{PROJECT_ID}/regions/{REGION}/clusters/{CLUSTER_NAME} + """ + + cluster = proto.Field(proto.STRING, number=1,) + + scale_tier = proto.Field(proto.ENUM, number=1, enum=ScaleTier,) + master_type = proto.Field(proto.STRING, number=2,) + accelerator_config = proto.Field( + proto.MESSAGE, number=3, message=SchedulerAcceleratorConfig, + ) + labels = proto.MapField(proto.STRING, proto.STRING, number=4,) + input_notebook_file = proto.Field(proto.STRING, number=5,) + container_image_uri = proto.Field(proto.STRING, number=6,) + output_notebook_folder = proto.Field(proto.STRING, number=7,) + params_yaml_file = proto.Field(proto.STRING, number=8,) + parameters = proto.Field(proto.STRING, number=9,) + service_account = proto.Field(proto.STRING, number=10,) + job_type = proto.Field(proto.ENUM, number=11, enum=JobType,) + dataproc_parameters = proto.Field( + proto.MESSAGE, number=12, oneof="job_parameters", message=DataprocParameters, + ) + + +class Execution(proto.Message): + r"""The definition of a single executed notebook. + Attributes: + execution_template (google.cloud.notebooks_v1.types.ExecutionTemplate): + execute metadata including name, hardware + spec, region, labels, etc. + name (str): + Output only. The resource name of the execute. Format: + \`projects/{project_id}/locations/{location}/execution/{execution_id} + display_name (str): + Output only. Name used for UI purposes. Name can only + contain alphanumeric characters and underscores '_'. + description (str): + A brief description of this execution. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time the Execution was + instantiated. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time the Execution was last + updated. + state (google.cloud.notebooks_v1.types.Execution.State): + Output only. State of the underlying AI + Platform job. + output_notebook_file (str): + Output notebook file generated by this + execution + job_uri (str): + Output only. 
The URI of the external job used + to execute the notebook. + """ + + class State(proto.Enum): + r"""Enum description of the state of the underlying AIP job.""" + STATE_UNSPECIFIED = 0 + QUEUED = 1 + PREPARING = 2 + RUNNING = 3 + SUCCEEDED = 4 + FAILED = 5 + CANCELLING = 6 + CANCELLED = 7 + EXPIRED = 9 + INITIALIZING = 10 + + execution_template = proto.Field( + proto.MESSAGE, number=1, message="ExecutionTemplate", + ) + name = proto.Field(proto.STRING, number=2,) + display_name = proto.Field(proto.STRING, number=3,) + description = proto.Field(proto.STRING, number=4,) + create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) + state = proto.Field(proto.ENUM, number=7, enum=State,) + output_notebook_file = proto.Field(proto.STRING, number=8,) + job_uri = proto.Field(proto.STRING, number=9,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/notebooks_v1/types/instance.py b/google/cloud/notebooks_v1/types/instance.py new file mode 100644 index 0000000..4cd3602 --- /dev/null +++ b/google/cloud/notebooks_v1/types/instance.py @@ -0,0 +1,516 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.cloud.notebooks_v1.types import environment +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.notebooks.v1", manifest={"ReservationAffinity", "Instance",}, +) + + +class ReservationAffinity(proto.Message): + r"""Reservation Affinity for consuming Zonal reservation. + + Attributes: + consume_reservation_type (google.cloud.notebooks_v1.types.ReservationAffinity.Type): + Optional. Type of reservation to consume + key (str): + Optional. Corresponds to the label key of + reservation resource. + values (Sequence[str]): + Optional. Corresponds to the label values of + reservation resource. + """ + + class Type(proto.Enum): + r"""Indicates whether to consume capacity from an reservation or + not. + """ + TYPE_UNSPECIFIED = 0 + NO_RESERVATION = 1 + ANY_RESERVATION = 2 + SPECIFIC_RESERVATION = 3 + + consume_reservation_type = proto.Field(proto.ENUM, number=1, enum=Type,) + key = proto.Field(proto.STRING, number=2,) + values = proto.RepeatedField(proto.STRING, number=3,) + + +class Instance(proto.Message): + r"""The definition of a notebook instance. + + Attributes: + name (str): + Output only. The name of this notebook instance. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + vm_image (google.cloud.notebooks_v1.types.VmImage): + Use a Compute Engine VM image to start the + notebook instance. + container_image (google.cloud.notebooks_v1.types.ContainerImage): + Use a container image to start the notebook + instance. + post_startup_script (str): + Path to a Bash script that automatically runs + after a notebook instance fully boots up. The + path must be a URL or Cloud Storage path + (gs://path-to-file/file-name). + proxy_uri (str): + Output only. The proxy endpoint that is used + to access the Jupyter notebook. + instance_owners (Sequence[str]): + Input only. The owner of this instance after creation. 
+ Format: ``alias@example.com`` + + Currently supports one owner only. If not specified, all of + the service account users of your VM instance's service + account can use the instance. + service_account (str): + The service account on this instance, giving access to other + Google Cloud services. You can use any service account + within the same project, but you must have the service + account user permission to use the instance. + + If not specified, the `Compute Engine default service + account `__ + is used. + service_account_scopes (Sequence[str]): + Optional. The URIs of service account scopes to be included + in Compute Engine instances. + + If not specified, the following + `scopes `__ + are defined: + + - https://www.googleapis.com/auth/cloud-platform + - https://www.googleapis.com/auth/userinfo.email If not + using default scopes, you need at least: + https://www.googleapis.com/auth/compute + machine_type (str): + Required. The `Compute Engine machine + type `__ of this instance. + accelerator_config (google.cloud.notebooks_v1.types.Instance.AcceleratorConfig): + The hardware accelerator used on this instance. If you use + accelerators, make sure that your configuration has `enough + vCPUs and memory to support the ``machine_type`` you have + selected `__. + state (google.cloud.notebooks_v1.types.Instance.State): + Output only. The state of this instance. + install_gpu_driver (bool): + Whether the end user authorizes Google Cloud + to install GPU driver on this instance. + If this field is empty or set to false, the GPU + driver won't be installed. Only applicable to + instances with GPUs. + custom_gpu_driver_path (str): + Specify a custom Cloud Storage path where the + GPU driver is stored. If not specified, we'll + automatically choose from official GPU drivers. + boot_disk_type (google.cloud.notebooks_v1.types.Instance.DiskType): + Input only. 
The type of the boot disk attached to this + instance, defaults to standard persistent disk + (``PD_STANDARD``). + boot_disk_size_gb (int): + Input only. The size of the boot disk in GB + attached to this instance, up to a maximum of + 64000 GB (64 TB). The minimum + recommended value is 100 GB. If not + specified, this defaults to 100. + data_disk_type (google.cloud.notebooks_v1.types.Instance.DiskType): + Input only. The type of the data disk attached to this + instance, defaults to standard persistent disk + (``PD_STANDARD``). + data_disk_size_gb (int): + Input only. The size of the data disk in GB + attached to this instance, up to a maximum of + 64000 GB (64 TB). You can choose the + size of the data disk based on how big your + notebooks and data are. If not specified, this + defaults to 100. + no_remove_data_disk (bool): + Input only. If true, the data disk will not + be auto deleted when deleting the instance. + disk_encryption (google.cloud.notebooks_v1.types.Instance.DiskEncryption): + Input only. Disk encryption method used on + the boot and data disks, defaults to GMEK. + kms_key (str): + Input only. The KMS key used to encrypt the disks, only + applicable if disk_encryption is CMEK. Format: + ``projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}`` + + Learn more about `using your own encryption + keys `__. + disks (Sequence[google.cloud.notebooks_v1.types.Instance.Disk]): + Output only. Attached disks to notebook + instance. + shielded_instance_config (google.cloud.notebooks_v1.types.Instance.ShieldedInstanceConfig): + Optional. Shielded VM configuration. [Images using supported + Shielded VM features] + (https://cloud.google.com/compute/docs/instances/modifying-shielded-vm). + no_public_ip (bool): + If true, no public IP will be assigned to + this instance. + no_proxy_access (bool): + If true, the notebook instance will not + register with the proxy. + network (str): + The name of the VPC that this instance is in. 
Format: + ``projects/{project_id}/global/networks/{network_id}`` + subnet (str): + The name of the subnet that this instance is in. Format: + ``projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}`` + labels (Sequence[google.cloud.notebooks_v1.types.Instance.LabelsEntry]): + Labels to apply to this instance. + These can be later modified by the setLabels + method. + metadata (Sequence[google.cloud.notebooks_v1.types.Instance.MetadataEntry]): + Custom metadata to apply to this instance. + tags (Sequence[str]): + Optional. The Compute Engine tags to add to runtime (see + `Tagging + instances `__). + upgrade_history (Sequence[google.cloud.notebooks_v1.types.Instance.UpgradeHistoryEntry]): + The upgrade history of this instance. + nic_type (google.cloud.notebooks_v1.types.Instance.NicType): + Optional. The type of vNIC to be used on this + interface. This may be gVNIC or VirtioNet. + reservation_affinity (google.cloud.notebooks_v1.types.ReservationAffinity): + Optional. The optional reservation affinity. Setting this + field will apply the specified `Zonal Compute + Reservation `__ + to this notebook instance. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Instance creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Instance update time. + """ + + class AcceleratorType(proto.Enum): + r"""Definition of the types of hardware accelerators that can be + used on this instance. 
+ """ + ACCELERATOR_TYPE_UNSPECIFIED = 0 + NVIDIA_TESLA_K80 = 1 + NVIDIA_TESLA_P100 = 2 + NVIDIA_TESLA_V100 = 3 + NVIDIA_TESLA_P4 = 4 + NVIDIA_TESLA_T4 = 5 + NVIDIA_TESLA_A100 = 11 + NVIDIA_TESLA_T4_VWS = 8 + NVIDIA_TESLA_P100_VWS = 9 + NVIDIA_TESLA_P4_VWS = 10 + TPU_V2 = 6 + TPU_V3 = 7 + + class State(proto.Enum): + r"""The definition of the states of this instance.""" + STATE_UNSPECIFIED = 0 + STARTING = 1 + PROVISIONING = 2 + ACTIVE = 3 + STOPPING = 4 + STOPPED = 5 + DELETED = 6 + UPGRADING = 7 + INITIALIZING = 8 + REGISTERING = 9 + + class DiskType(proto.Enum): + r"""Possible disk types for notebook instances.""" + DISK_TYPE_UNSPECIFIED = 0 + PD_STANDARD = 1 + PD_SSD = 2 + PD_BALANCED = 3 + + class DiskEncryption(proto.Enum): + r"""Definition of the disk encryption options.""" + DISK_ENCRYPTION_UNSPECIFIED = 0 + GMEK = 1 + CMEK = 2 + + class NicType(proto.Enum): + r"""The type of vNIC driver. Default should be UNSPECIFIED_NIC_TYPE.""" + UNSPECIFIED_NIC_TYPE = 0 + VIRTIO_NET = 1 + GVNIC = 2 + + class AcceleratorConfig(proto.Message): + r"""Definition of a hardware accelerator. Note that not all combinations + of ``type`` and ``core_count`` are valid. Check `GPUs on Compute + Engine `__ to find a valid + combination. TPUs are not supported. + + Attributes: + type_ (google.cloud.notebooks_v1.types.Instance.AcceleratorType): + Type of this accelerator. + core_count (int): + Count of cores of this accelerator. + """ + + type_ = proto.Field(proto.ENUM, number=1, enum="Instance.AcceleratorType",) + core_count = proto.Field(proto.INT64, number=2,) + + class Disk(proto.Message): + r"""An instance-attached disk resource. + + Attributes: + auto_delete (bool): + Indicates whether the disk will be auto- + eleted when the instance is deleted (but not + when the disk is detached from the instance). + boot (bool): + Indicates that this is a boot disk. The + virtual machine will use the first partition of + the disk for its root filesystem. 
+ device_name (str): + Indicates a unique device name of your choice that is + reflected into the /dev/disk/by-id/google-\* tree of a Linux + operating system running within the instance. This name can + be used to reference the device for mounting, resizing, and + so on, from within the instance. + + If not specified, the server chooses a default device name + to apply to this disk, in the form persistent-disk-x, where + x is a number assigned by Google Compute Engine.This field + is only applicable for persistent disks. + disk_size_gb (int): + Indicates the size of the disk in base-2 GB. + guest_os_features (Sequence[google.cloud.notebooks_v1.types.Instance.Disk.GuestOsFeature]): + Indicates a list of features to enable on the + guest operating system. Applicable only for + bootable images. Read Enabling guest operating + system features to see a list of available + options. + index (int): + A zero-based index to this disk, where 0 is + reserved for the boot disk. If you have many + disks attached to an instance, each disk would + have a unique index number. + interface (str): + Indicates the disk interface to use for + attaching this disk, which is either SCSI or + NVME. The default is SCSI. Persistent disks must + always use SCSI and the request will fail if you + attempt to attach a persistent disk in any other + format than SCSI. Local SSDs can use either NVME + or SCSI. For performance characteristics of SCSI + over NVMe, see Local SSD performance. + Valid values: + + * NVME + * SCSI + kind (str): + Type of the resource. Always + compute#attachedDisk for attached disks. + licenses (Sequence[str]): + A list of publicly visible licenses. Reserved + for Google's use. A License represents billing + and aggregate usage data for public and + marketplace images. + mode (str): + The mode in which to attach this disk, either READ_WRITE or + READ_ONLY. If not specified, the default is to attach the + disk in READ_WRITE mode. 
Valid values: READ_ONLY READ_WRITE + source (str): + Indicates a valid partial or full URL to an + existing Persistent Disk resource. + type_ (str): + Indicates the type of the disk, either + SCRATCH or PERSISTENT. Valid values: + + * PERSISTENT + * SCRATCH + """ + + class GuestOsFeature(proto.Message): + r"""Guest OS features for boot disk. + + Attributes: + type_ (str): + The ID of a supported feature. Read Enabling guest operating + system features to see a list of available options. Valid + values: FEATURE_TYPE_UNSPECIFIED MULTI_IP_SUBNET SECURE_BOOT + UEFI_COMPATIBLE VIRTIO_SCSI_MULTIQUEUE WINDOWS + """ + + type_ = proto.Field(proto.STRING, number=1,) + + auto_delete = proto.Field(proto.BOOL, number=1,) + boot = proto.Field(proto.BOOL, number=2,) + device_name = proto.Field(proto.STRING, number=3,) + disk_size_gb = proto.Field(proto.INT64, number=4,) + guest_os_features = proto.RepeatedField( + proto.MESSAGE, number=5, message="Instance.Disk.GuestOsFeature", + ) + index = proto.Field(proto.INT64, number=6,) + interface = proto.Field(proto.STRING, number=7,) + kind = proto.Field(proto.STRING, number=8,) + licenses = proto.RepeatedField(proto.STRING, number=9,) + mode = proto.Field(proto.STRING, number=10,) + source = proto.Field(proto.STRING, number=11,) + type_ = proto.Field(proto.STRING, number=12,) + + class ShieldedInstanceConfig(proto.Message): + r"""A set of Shielded Instance options. Check [Images using supported + Shielded VM features] Not all combinations are valid. + + Attributes: + enable_secure_boot (bool): + Defines whether the instance has Secure Boot + enabled. + Secure Boot helps ensure that the system only + runs authentic software by verifying the digital + signature of all boot components, and halting + the boot process if signature verification + fails. Disabled by default. + enable_vtpm (bool): + Defines whether the instance has the vTPM + enabled. Enabled by default. 
+            enable_integrity_monitoring (bool):
+                Defines whether the instance has integrity
+                monitoring enabled.
+                Enables monitoring and attestation of the boot
+                integrity of the instance. The attestation is
+                performed against the integrity policy baseline.
+                This baseline is initially derived from the
+                implicitly trusted boot image when the instance
+                is created. Enabled by default.
+        """
+
+        enable_secure_boot = proto.Field(proto.BOOL, number=1,)
+        enable_vtpm = proto.Field(proto.BOOL, number=2,)
+        enable_integrity_monitoring = proto.Field(proto.BOOL, number=3,)
+
+    class UpgradeHistoryEntry(proto.Message):
+        r"""The entry of VM image upgrade history.
+
+        Attributes:
+            snapshot (str):
+                The snapshot of the boot disk of this
+                notebook instance before upgrade.
+            vm_image (str):
+                The VM image before this instance upgrade.
+            container_image (str):
+                The container image before this instance
+                upgrade.
+            framework (str):
+                The framework of this notebook instance.
+            version (str):
+                The version of the notebook instance before
+                this upgrade.
+            state (google.cloud.notebooks_v1.types.Instance.UpgradeHistoryEntry.State):
+                The state of this instance upgrade history
+                entry.
+            create_time (google.protobuf.timestamp_pb2.Timestamp):
+                The time that this instance upgrade history
+                entry is created.
+            target_image (str):
+                Target VM Image. Format: ainotebooks-
+                m/project/image-name/name.
+            action (google.cloud.notebooks_v1.types.Instance.UpgradeHistoryEntry.Action):
+                Action. Rollback or Upgrade.
+            target_version (str):
+                Target VM Version, like m63.
+ """ + + class State(proto.Enum): + r"""The definition of the states of this upgrade history entry.""" + STATE_UNSPECIFIED = 0 + STARTED = 1 + SUCCEEDED = 2 + FAILED = 3 + + class Action(proto.Enum): + r"""The definition of operations of this upgrade history entry.""" + ACTION_UNSPECIFIED = 0 + UPGRADE = 1 + ROLLBACK = 2 + + snapshot = proto.Field(proto.STRING, number=1,) + vm_image = proto.Field(proto.STRING, number=2,) + container_image = proto.Field(proto.STRING, number=3,) + framework = proto.Field(proto.STRING, number=4,) + version = proto.Field(proto.STRING, number=5,) + state = proto.Field( + proto.ENUM, number=6, enum="Instance.UpgradeHistoryEntry.State", + ) + create_time = proto.Field( + proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp, + ) + target_image = proto.Field(proto.STRING, number=8,) + action = proto.Field( + proto.ENUM, number=9, enum="Instance.UpgradeHistoryEntry.Action", + ) + target_version = proto.Field(proto.STRING, number=10,) + + name = proto.Field(proto.STRING, number=1,) + vm_image = proto.Field( + proto.MESSAGE, number=2, oneof="environment", message=environment.VmImage, + ) + container_image = proto.Field( + proto.MESSAGE, + number=3, + oneof="environment", + message=environment.ContainerImage, + ) + post_startup_script = proto.Field(proto.STRING, number=4,) + proxy_uri = proto.Field(proto.STRING, number=5,) + instance_owners = proto.RepeatedField(proto.STRING, number=6,) + service_account = proto.Field(proto.STRING, number=7,) + service_account_scopes = proto.RepeatedField(proto.STRING, number=31,) + machine_type = proto.Field(proto.STRING, number=8,) + accelerator_config = proto.Field( + proto.MESSAGE, number=9, message=AcceleratorConfig, + ) + state = proto.Field(proto.ENUM, number=10, enum=State,) + install_gpu_driver = proto.Field(proto.BOOL, number=11,) + custom_gpu_driver_path = proto.Field(proto.STRING, number=12,) + boot_disk_type = proto.Field(proto.ENUM, number=13, enum=DiskType,) + boot_disk_size_gb = 
proto.Field(proto.INT64, number=14,) + data_disk_type = proto.Field(proto.ENUM, number=25, enum=DiskType,) + data_disk_size_gb = proto.Field(proto.INT64, number=26,) + no_remove_data_disk = proto.Field(proto.BOOL, number=27,) + disk_encryption = proto.Field(proto.ENUM, number=15, enum=DiskEncryption,) + kms_key = proto.Field(proto.STRING, number=16,) + disks = proto.RepeatedField(proto.MESSAGE, number=28, message=Disk,) + shielded_instance_config = proto.Field( + proto.MESSAGE, number=30, message=ShieldedInstanceConfig, + ) + no_public_ip = proto.Field(proto.BOOL, number=17,) + no_proxy_access = proto.Field(proto.BOOL, number=18,) + network = proto.Field(proto.STRING, number=19,) + subnet = proto.Field(proto.STRING, number=20,) + labels = proto.MapField(proto.STRING, proto.STRING, number=21,) + metadata = proto.MapField(proto.STRING, proto.STRING, number=22,) + tags = proto.RepeatedField(proto.STRING, number=32,) + upgrade_history = proto.RepeatedField( + proto.MESSAGE, number=29, message=UpgradeHistoryEntry, + ) + nic_type = proto.Field(proto.ENUM, number=33, enum=NicType,) + reservation_affinity = proto.Field( + proto.MESSAGE, number=34, message="ReservationAffinity", + ) + create_time = proto.Field( + proto.MESSAGE, number=23, message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, number=24, message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/notebooks_v1/types/instance_config.py b/google/cloud/notebooks_v1/types/instance_config.py new file mode 100644 index 0000000..f7edb43 --- /dev/null +++ b/google/cloud/notebooks_v1/types/instance_config.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.notebooks.v1", manifest={"InstanceConfig",}, +) + + +class InstanceConfig(proto.Message): + r"""Notebook instance configurations that can be updated. + Attributes: + notebook_upgrade_schedule (str): + Cron expression in UTC timezone, used to schedule instance + auto upgrade. Please follow the `cron + format `__. + enable_health_monitoring (bool): + Verifies core internal services are running. + More info: go/notebooks-health + """ + + notebook_upgrade_schedule = proto.Field(proto.STRING, number=1,) + enable_health_monitoring = proto.Field(proto.BOOL, number=2,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/notebooks_v1/types/managed_service.py b/google/cloud/notebooks_v1/types/managed_service.py new file mode 100644 index 0000000..432a7bf --- /dev/null +++ b/google/cloud/notebooks_v1/types/managed_service.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.cloud.notebooks_v1.types import event as gcn_event +from google.cloud.notebooks_v1.types import runtime as gcn_runtime + + +__protobuf__ = proto.module( + package="google.cloud.notebooks.v1", + manifest={ + "ListRuntimesRequest", + "ListRuntimesResponse", + "GetRuntimeRequest", + "CreateRuntimeRequest", + "DeleteRuntimeRequest", + "StartRuntimeRequest", + "StopRuntimeRequest", + "SwitchRuntimeRequest", + "ResetRuntimeRequest", + "ReportRuntimeEventRequest", + }, +) + + +class ListRuntimesRequest(proto.Message): + r"""Request for listing Managed Notebook Runtimes. + Attributes: + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + page_size (int): + Maximum return size of the list call. + page_token (str): + A previous returned page token that can be + used to continue listing from the last result. + """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + + +class ListRuntimesResponse(proto.Message): + r"""Response for listing Managed Notebook Runtimes. + Attributes: + runtimes (Sequence[google.cloud.notebooks_v1.types.Runtime]): + A list of returned Runtimes. + next_page_token (str): + Page token that can be used to continue + listing from the last result in the next list + call. + unreachable (Sequence[str]): + Locations that could not be reached. For example, + ['us-west1', 'us-central1']. A ListRuntimesResponse will + only contain either runtimes or unreachables, + """ + + @property + def raw_page(self): + return self + + runtimes = proto.RepeatedField( + proto.MESSAGE, number=1, message=gcn_runtime.Runtime, + ) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) + + +class GetRuntimeRequest(proto.Message): + r"""Request for getting a Managed Notebook Runtime. + Attributes: + name (str): + Required. 
Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class CreateRuntimeRequest(proto.Message): + r"""Request for creating a Managed Notebook Runtime. + Attributes: + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + runtime_id (str): + Required. User-defined unique ID of this + Runtime. + runtime (google.cloud.notebooks_v1.types.Runtime): + Required. The Runtime to be created. + """ + + parent = proto.Field(proto.STRING, number=1,) + runtime_id = proto.Field(proto.STRING, number=2,) + runtime = proto.Field(proto.MESSAGE, number=3, message=gcn_runtime.Runtime,) + + +class DeleteRuntimeRequest(proto.Message): + r"""Request for deleting a Managed Notebook Runtime. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class StartRuntimeRequest(proto.Message): + r"""Request for starting a Managed Notebook Runtime. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class StopRuntimeRequest(proto.Message): + r"""Request for stopping a Managed Notebook Runtime. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class SwitchRuntimeRequest(proto.Message): + r"""Request for switching a Managed Notebook Runtime. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + machine_type (str): + machine type. + accelerator_config (google.cloud.notebooks_v1.types.RuntimeAcceleratorConfig): + accelerator config. 
+ """ + + name = proto.Field(proto.STRING, number=1,) + machine_type = proto.Field(proto.STRING, number=2,) + accelerator_config = proto.Field( + proto.MESSAGE, number=3, message=gcn_runtime.RuntimeAcceleratorConfig, + ) + + +class ResetRuntimeRequest(proto.Message): + r"""Request for reseting a Managed Notebook Runtime. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class ReportRuntimeEventRequest(proto.Message): + r"""Request for reporting a Managed Notebook Event. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/runtimes/{runtime_id}`` + vm_id (str): + Required. The VM hardware token for + authenticating the VM. + https://cloud.google.com/compute/docs/instances/verifying- + instance-identity + event (google.cloud.notebooks_v1.types.Event): + Required. The Event to be reported. + """ + + name = proto.Field(proto.STRING, number=1,) + vm_id = proto.Field(proto.STRING, number=2,) + event = proto.Field(proto.MESSAGE, number=3, message=gcn_event.Event,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/notebooks_v1/types/runtime.py b/google/cloud/notebooks_v1/types/runtime.py new file mode 100644 index 0000000..1b448e4 --- /dev/null +++ b/google/cloud/notebooks_v1/types/runtime.py @@ -0,0 +1,590 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.notebooks_v1.types import environment +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.notebooks.v1", + manifest={ + "Runtime", + "RuntimeAcceleratorConfig", + "EncryptionConfig", + "LocalDisk", + "LocalDiskInitializeParams", + "RuntimeAccessConfig", + "RuntimeSoftwareConfig", + "RuntimeMetrics", + "RuntimeShieldedInstanceConfig", + "VirtualMachine", + "VirtualMachineConfig", + }, +) + + +class Runtime(proto.Message): + r"""The definition of a Runtime for a managed notebook instance. + Attributes: + name (str): + Output only. The resource name of the runtime. Format: + ``projects/{project}/locations/{location}/runtimes/{runtimeId}`` + virtual_machine (google.cloud.notebooks_v1.types.VirtualMachine): + Use a Compute Engine VM image to start the + managed notebook instance. + state (google.cloud.notebooks_v1.types.Runtime.State): + Output only. Runtime state. + health_state (google.cloud.notebooks_v1.types.Runtime.HealthState): + Output only. Runtime health_state. + access_config (google.cloud.notebooks_v1.types.RuntimeAccessConfig): + The config settings for accessing runtime. + software_config (google.cloud.notebooks_v1.types.RuntimeSoftwareConfig): + The config settings for software inside the + runtime. + metrics (google.cloud.notebooks_v1.types.RuntimeMetrics): + Output only. Contains Runtime daemon metrics + such as Service status and JupyterLab stats. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Runtime creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Runtime update time. 
+ """ + + class State(proto.Enum): + r"""The definition of the states of this runtime.""" + STATE_UNSPECIFIED = 0 + STARTING = 1 + PROVISIONING = 2 + ACTIVE = 3 + STOPPING = 4 + STOPPED = 5 + DELETING = 6 + UPGRADING = 7 + INITIALIZING = 8 + + class HealthState(proto.Enum): + r"""The runtime substate.""" + HEALTH_STATE_UNSPECIFIED = 0 + HEALTHY = 1 + UNHEALTHY = 2 + + name = proto.Field(proto.STRING, number=1,) + virtual_machine = proto.Field( + proto.MESSAGE, number=2, oneof="runtime_type", message="VirtualMachine", + ) + state = proto.Field(proto.ENUM, number=3, enum=State,) + health_state = proto.Field(proto.ENUM, number=4, enum=HealthState,) + access_config = proto.Field(proto.MESSAGE, number=5, message="RuntimeAccessConfig",) + software_config = proto.Field( + proto.MESSAGE, number=6, message="RuntimeSoftwareConfig", + ) + metrics = proto.Field(proto.MESSAGE, number=7, message="RuntimeMetrics",) + create_time = proto.Field( + proto.MESSAGE, number=20, message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, number=21, message=timestamp_pb2.Timestamp, + ) + + +class RuntimeAcceleratorConfig(proto.Message): + r"""Definition of the types of hardware accelerators that can be used. + Definition of the types of hardware accelerators that can be used. + See `Compute Engine + AcceleratorTypes `__. + Examples: + + - ``nvidia-tesla-k80`` + - ``nvidia-tesla-p100`` + - ``nvidia-tesla-v100`` + - ``nvidia-tesla-t4`` + - ``nvidia-tesla-a100`` + + Attributes: + type_ (google.cloud.notebooks_v1.types.RuntimeAcceleratorConfig.AcceleratorType): + Accelerator model. + core_count (int): + Count of cores of this accelerator. 
+ """ + + class AcceleratorType(proto.Enum): + r"""Type of this accelerator.""" + ACCELERATOR_TYPE_UNSPECIFIED = 0 + NVIDIA_TESLA_K80 = 1 + NVIDIA_TESLA_P100 = 2 + NVIDIA_TESLA_V100 = 3 + NVIDIA_TESLA_P4 = 4 + NVIDIA_TESLA_T4 = 5 + NVIDIA_TESLA_A100 = 6 + TPU_V2 = 7 + TPU_V3 = 8 + NVIDIA_TESLA_T4_VWS = 9 + NVIDIA_TESLA_P100_VWS = 10 + NVIDIA_TESLA_P4_VWS = 11 + + type_ = proto.Field(proto.ENUM, number=1, enum=AcceleratorType,) + core_count = proto.Field(proto.INT64, number=2,) + + +class EncryptionConfig(proto.Message): + r"""Represents a custom encryption key configuration that can be + applied to a resource. This will encrypt all disks in Virtual + Machine. + + Attributes: + kms_key (str): + The Cloud KMS resource identifier of the customer-managed + encryption key used to protect a resource, such as a disks. + It has the following format: + ``projects/{PROJECT_ID}/locations/{REGION}/keyRings/{KEY_RING_NAME}/cryptoKeys/{KEY_NAME}`` + """ + + kms_key = proto.Field(proto.STRING, number=1,) + + +class LocalDisk(proto.Message): + r"""An Local attached disk resource. + Attributes: + auto_delete (bool): + Optional. Output only. Specifies whether the + disk will be auto-deleted when the instance is + deleted (but not when the disk is detached from + the instance). + boot (bool): + Optional. Output only. Indicates that this is + a boot disk. The virtual machine will use the + first partition of the disk for its root + filesystem. + device_name (str): + Optional. Output only. Specifies a unique device name of + your choice that is reflected into the + /dev/disk/by-id/google-\* tree of a Linux operating system + running within the instance. This name can be used to + reference the device for mounting, resizing, and so on, from + within the instance. + + If not specified, the server chooses a default device name + to apply to this disk, in the form persistent-disk-x, where + x is a number assigned by Google Compute Engine. 
This field + is only applicable for persistent disks. + guest_os_features (Sequence[google.cloud.notebooks_v1.types.LocalDisk.RuntimeGuestOsFeature]): + Output only. Indicates a list of features to + enable on the guest operating system. Applicable + only for bootable images. Read Enabling guest + operating system features to see a list of + available options. + index (int): + Output only. A zero-based index to this disk, + where 0 is reserved for the boot disk. If you + have many disks attached to an instance, each + disk would have a unique index number. + initialize_params (google.cloud.notebooks_v1.types.LocalDiskInitializeParams): + Input only. Specifies the parameters for a + new disk that will be created alongside the new + instance. Use initialization parameters to + create boot disks or local SSDs attached to the + new instance. + This property is mutually exclusive with the + source property; you can only define one or the + other, but not both. + interface (str): + Specifies the disk interface to use for + attaching this disk, which is either SCSI or + NVME. The default is SCSI. Persistent disks must + always use SCSI and the request will fail if you + attempt to attach a persistent disk in any other + format than SCSI. Local SSDs can use either NVME + or SCSI. For performance characteristics of SCSI + over NVMe, see Local SSD performance. Valid + values: + + * NVME + * SCSI + kind (str): + Output only. Type of the resource. Always + compute#attachedDisk for attached disks. + licenses (Sequence[str]): + Output only. Any valid publicly visible + licenses. + mode (str): + The mode in which to attach this disk, either READ_WRITE or + READ_ONLY. If not specified, the default is to attach the + disk in READ_WRITE mode. Valid values: READ_ONLY READ_WRITE + source (str): + Specifies a valid partial or full URL to an + existing Persistent Disk resource. + type_ (str): + Specifies the type of the disk, either + SCRATCH or PERSISTENT. 
If not specified, the + default is PERSISTENT. Valid values: + + * PERSISTENT + * SCRATCH + """ + + class RuntimeGuestOsFeature(proto.Message): + r"""Optional. A list of features to enable on the guest operating + system. Applicable only for bootable images. Read `Enabling guest + operating system + features `__ + to see a list of available options. Guest OS features for boot disk. + + Attributes: + type_ (str): + The ID of a supported feature. Read `Enabling guest + operating system + features `__ + to see a list of available options. + + Valid values: + + - FEATURE_TYPE_UNSPECIFIED + - MULTI_IP_SUBNET + - SECURE_BOOT + - UEFI_COMPATIBLE + - VIRTIO_SCSI_MULTIQUEUE + - WINDOWS + """ + + type_ = proto.Field(proto.STRING, number=1,) + + auto_delete = proto.Field(proto.BOOL, number=1,) + boot = proto.Field(proto.BOOL, number=2,) + device_name = proto.Field(proto.STRING, number=3,) + guest_os_features = proto.RepeatedField( + proto.MESSAGE, number=4, message=RuntimeGuestOsFeature, + ) + index = proto.Field(proto.INT32, number=5,) + initialize_params = proto.Field( + proto.MESSAGE, number=6, message="LocalDiskInitializeParams", + ) + interface = proto.Field(proto.STRING, number=7,) + kind = proto.Field(proto.STRING, number=8,) + licenses = proto.RepeatedField(proto.STRING, number=9,) + mode = proto.Field(proto.STRING, number=10,) + source = proto.Field(proto.STRING, number=11,) + type_ = proto.Field(proto.STRING, number=12,) + + +class LocalDiskInitializeParams(proto.Message): + r"""Input only. Specifies the parameters for a new disk that will + be created alongside the new instance. Use initialization + parameters to create boot disks or local SSDs attached to the + new runtime. + This property is mutually exclusive with the source property; + you can only define one or the other, but not both. + + Attributes: + description (str): + Optional. Provide this property when creating + the disk. + disk_name (str): + Optional. Specifies the disk name. 
If not + specified, the default is to use the name of the + instance. If the disk with the instance name + exists already in the given zone/region, a new + name will be automatically generated. + disk_size_gb (int): + Optional. Specifies the size of the disk in + base-2 GB. If not specified, the disk will be + the same size as the image (usually 10GB). If + specified, the size must be equal to or larger + than 10GB. Default 100 GB. + disk_type (google.cloud.notebooks_v1.types.LocalDiskInitializeParams.DiskType): + Input only. The type of the boot disk attached to this + instance, defaults to standard persistent disk + (``PD_STANDARD``). + labels (Sequence[google.cloud.notebooks_v1.types.LocalDiskInitializeParams.LabelsEntry]): + Optional. Labels to apply to this disk. These + can be later modified by the disks.setLabels + method. This field is only applicable for + persistent disks. + """ + + class DiskType(proto.Enum): + r"""Possible disk types.""" + DISK_TYPE_UNSPECIFIED = 0 + PD_STANDARD = 1 + PD_SSD = 2 + PD_BALANCED = 3 + + description = proto.Field(proto.STRING, number=1,) + disk_name = proto.Field(proto.STRING, number=2,) + disk_size_gb = proto.Field(proto.INT64, number=3,) + disk_type = proto.Field(proto.ENUM, number=4, enum=DiskType,) + labels = proto.MapField(proto.STRING, proto.STRING, number=5,) + + +class RuntimeAccessConfig(proto.Message): + r"""Specifies the login configuration for Runtime + Attributes: + access_type (google.cloud.notebooks_v1.types.RuntimeAccessConfig.RuntimeAccessType): + The type of access mode this instance. + runtime_owner (str): + The owner of this runtime after creation. Format: + ``alias@example.com`` Currently supports one owner only. + proxy_uri (str): + Output only. The proxy endpoint that is used + to access the runtime. + """ + + class RuntimeAccessType(proto.Enum): + r"""Possible ways to access runtime. Authentication mode. + Currently supports: Single User only. 
+ """ + RUNTIME_ACCESS_TYPE_UNSPECIFIED = 0 + SINGLE_USER = 1 + + access_type = proto.Field(proto.ENUM, number=1, enum=RuntimeAccessType,) + runtime_owner = proto.Field(proto.STRING, number=2,) + proxy_uri = proto.Field(proto.STRING, number=3,) + + +class RuntimeSoftwareConfig(proto.Message): + r"""Specifies the selection and configuration of software inside the + runtime. The properties to set on runtime. Properties keys are + specified in ``key:value`` format, for example: + + - ``idle_shutdown: true`` + - ``idle_shutdown_timeout: 180`` + - ``report-system-health: true`` + + Attributes: + notebook_upgrade_schedule (str): + Cron expression in UTC timezone, used to schedule instance + auto upgrade. Please follow the `cron + format `__. + enable_health_monitoring (bool): + Verifies core internal services are running. + Default: True + idle_shutdown (bool): + Runtime will automatically shutdown after + idle_shutdown_time. Default: True + idle_shutdown_timeout (int): + Time in minutes to wait before shuting down + runtime. Default: 180 minutes + install_gpu_driver (bool): + Install Nvidia Driver automatically. + custom_gpu_driver_path (str): + Specify a custom Cloud Storage path where the + GPU driver is stored. If not specified, we'll + automatically choose from official GPU drivers. + post_startup_script (str): + Path to a Bash script that automatically runs + after a notebook instance fully boots up. The + path must be a URL or Cloud Storage path + (gs://path-to-file/file-name). 
+ """ + + notebook_upgrade_schedule = proto.Field(proto.STRING, number=1,) + enable_health_monitoring = proto.Field(proto.BOOL, number=2, optional=True,) + idle_shutdown = proto.Field(proto.BOOL, number=3, optional=True,) + idle_shutdown_timeout = proto.Field(proto.INT32, number=4,) + install_gpu_driver = proto.Field(proto.BOOL, number=5,) + custom_gpu_driver_path = proto.Field(proto.STRING, number=6,) + post_startup_script = proto.Field(proto.STRING, number=7,) + + +class RuntimeMetrics(proto.Message): + r"""Contains runtime daemon metrics, such as OS and kernels and + sessions stats. + + Attributes: + system_metrics (Sequence[google.cloud.notebooks_v1.types.RuntimeMetrics.SystemMetricsEntry]): + Output only. The system metrics. + """ + + system_metrics = proto.MapField(proto.STRING, proto.STRING, number=1,) + + +class RuntimeShieldedInstanceConfig(proto.Message): + r"""A set of Shielded Instance options. Check `Images using supported + Shielded VM + features `__. + Not all combinations are valid. + + Attributes: + enable_secure_boot (bool): + Defines whether the instance has Secure Boot + enabled. + Secure Boot helps ensure that the system only + runs authentic software by verifying the digital + signature of all boot components, and halting + the boot process if signature verification + fails. Disabled by default. + enable_vtpm (bool): + Defines whether the instance has the vTPM + enabled. Enabled by default. + enable_integrity_monitoring (bool): + Defines whether the instance has integrity + monitoring enabled. + Enables monitoring and attestation of the boot + integrity of the instance. The attestation is + performed against the integrity policy baseline. + This baseline is initially derived from the + implicitly trusted boot image when the instance + is created. Enabled by default. 
+ """ + + enable_secure_boot = proto.Field(proto.BOOL, number=1,) + enable_vtpm = proto.Field(proto.BOOL, number=2,) + enable_integrity_monitoring = proto.Field(proto.BOOL, number=3,) + + +class VirtualMachine(proto.Message): + r"""Runtime using Virtual Machine for computing. + Attributes: + instance_name (str): + Output only. The user-friendly name of the + Managed Compute Engine instance. + instance_id (str): + Output only. The unique identifier of the + Managed Compute Engine instance. + virtual_machine_config (google.cloud.notebooks_v1.types.VirtualMachineConfig): + Virtual Machine configuration settings. + """ + + instance_name = proto.Field(proto.STRING, number=1,) + instance_id = proto.Field(proto.STRING, number=2,) + virtual_machine_config = proto.Field( + proto.MESSAGE, number=3, message="VirtualMachineConfig", + ) + + +class VirtualMachineConfig(proto.Message): + r"""The config settings for virtual machine. + Attributes: + zone (str): + Output only. The zone where the virtual machine is located. + If using regional request, the notebooks service will pick a + location in the corresponding runtime region. On a get + request, zone will always be present. Example: + + - ``us-central1-b`` + machine_type (str): + Required. The Compute Engine machine type used for runtimes. + Short name is valid. Examples: + + - ``n1-standard-2`` + - ``e2-standard-8`` + container_images (Sequence[google.cloud.notebooks_v1.types.ContainerImage]): + Optional. Use a list of container images to + start the notebook instance. + data_disk (google.cloud.notebooks_v1.types.LocalDisk): + Required. Data disk option configuration + settings. + encryption_config (google.cloud.notebooks_v1.types.EncryptionConfig): + Optional. Encryption settings for virtual + machine data disk. + shielded_instance_config (google.cloud.notebooks_v1.types.RuntimeShieldedInstanceConfig): + Optional. Shielded VM Instance configuration + settings. 
+ accelerator_config (google.cloud.notebooks_v1.types.RuntimeAcceleratorConfig): + Optional. The Compute Engine accelerator + configuration for this runtime. + network (str): + Optional. The Compute Engine network to be used for machine + communications. Cannot be specified with subnetwork. If + neither ``network`` nor ``subnet`` is specified, the + "default" network of the project is used, if it exists. + + A full URL or partial URI. Examples: + + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default`` + - ``projects/[project_id]/regions/global/default`` + + Runtimes are managed resources inside Google Infrastructure. + Runtimes support the following network configurations: + + - Google Managed Network (Network & subnet are empty) + - Consumer Project VPC (network & subnet are required). + Requires configuring Private Service Access. + - Shared VPC (network & subnet are required). Requires + configuring Private Service Access. + subnet (str): + Optional. The Compute Engine subnetwork to be used for + machine communications. Cannot be specified with network. + + A full URL or partial URI are valid. Examples: + + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0`` + - ``projects/[project_id]/regions/us-east1/subnetworks/sub0`` + internal_ip_only (bool): + Optional. If true, runtime will only have internal IP + addresses. By default, runtimes are not restricted to + internal IP addresses, and will have ephemeral external IP + addresses assigned to each vm. This ``internal_ip_only`` + restriction can only be enabled for subnetwork enabled + networks, and all dependencies must be configured to be + accessible without external IP addresses. + tags (Sequence[str]): + Optional. The Compute Engine tags to add to runtime (see + `Tagging + instances `__). + guest_attributes (Sequence[google.cloud.notebooks_v1.types.VirtualMachineConfig.GuestAttributesEntry]): + Output only. 
The Compute Engine guest attributes. (see + `Project and instance guest + attributes `__). + metadata (Sequence[google.cloud.notebooks_v1.types.VirtualMachineConfig.MetadataEntry]): + Optional. The Compute Engine metadata entries to add to + virtual machine. (see `Project and instance + metadata `__). + labels (Sequence[google.cloud.notebooks_v1.types.VirtualMachineConfig.LabelsEntry]): + Optional. The labels to associate with this runtime. Label + **keys** must contain 1 to 63 characters, and must conform + to `RFC 1035 `__. + Label **values** may be empty, but, if present, must contain + 1 to 63 characters, and must conform to `RFC + 1035 `__. No more than + 32 labels can be associated with a cluster. + nic_type (google.cloud.notebooks_v1.types.VirtualMachineConfig.NicType): + Optional. The type of vNIC to be used on this + interface. This may be gVNIC or VirtioNet. + """ + + class NicType(proto.Enum): + r"""The type of vNIC driver. Default should be UNSPECIFIED_NIC_TYPE.""" + UNSPECIFIED_NIC_TYPE = 0 + VIRTIO_NET = 1 + GVNIC = 2 + + zone = proto.Field(proto.STRING, number=1,) + machine_type = proto.Field(proto.STRING, number=2,) + container_images = proto.RepeatedField( + proto.MESSAGE, number=3, message=environment.ContainerImage, + ) + data_disk = proto.Field(proto.MESSAGE, number=4, message="LocalDisk",) + encryption_config = proto.Field( + proto.MESSAGE, number=5, message="EncryptionConfig", + ) + shielded_instance_config = proto.Field( + proto.MESSAGE, number=6, message="RuntimeShieldedInstanceConfig", + ) + accelerator_config = proto.Field( + proto.MESSAGE, number=7, message="RuntimeAcceleratorConfig", + ) + network = proto.Field(proto.STRING, number=8,) + subnet = proto.Field(proto.STRING, number=9,) + internal_ip_only = proto.Field(proto.BOOL, number=10,) + tags = proto.RepeatedField(proto.STRING, number=13,) + guest_attributes = proto.MapField(proto.STRING, proto.STRING, number=14,) + metadata = proto.MapField(proto.STRING, proto.STRING, number=15,) + 
labels = proto.MapField(proto.STRING, proto.STRING, number=16,) + nic_type = proto.Field(proto.ENUM, number=17, enum=NicType,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/notebooks_v1/types/schedule.py b/google/cloud/notebooks_v1/types/schedule.py new file mode 100644 index 0000000..118f0b9 --- /dev/null +++ b/google/cloud/notebooks_v1/types/schedule.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.notebooks_v1.types import execution +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.notebooks.v1", manifest={"Schedule",}, +) + + +class Schedule(proto.Message): + r"""The definition of a schedule. + Attributes: + name (str): + Output only. The name of this schedule. Format: + ``projects/{project_id}/locations/{location}/schedules/{schedule_id}`` + display_name (str): + Output only. Display name used for UI purposes. Name can + only contain alphanumeric characters, hyphens '-', and + underscores '_'. + description (str): + A brief description of this environment. + state (google.cloud.notebooks_v1.types.Schedule.State): + + cron_schedule (str): + Cron-tab formatted schedule by which the job will execute + Format: minute, hour, day of month, month, day of week e.g. 
+ 0 0 \* \* WED = every Wednesday More examples: + https://crontab.guru/examples.html + time_zone (str): + Timezone on which the cron_schedule. The value of this field + must be a time zone name from the tz database. TZ Database: + https://en.wikipedia.org/wiki/List_of_tz_database_time_zones + + Note that some time zones include a provision for daylight + savings time. The rules for daylight saving time are + determined by the chosen tz. For UTC use the string "utc". + If a time zone is not specified, the default will be in UTC + (also known as GMT). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time the schedule was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time the schedule was last + updated. + execution_template (google.cloud.notebooks_v1.types.ExecutionTemplate): + Notebook Execution Template corresponding to + this schedule. + recent_executions (Sequence[google.cloud.notebooks_v1.types.Execution]): + Output only. The most recent execution names + triggered from this schedule and their + corresponding states. 
+ """ + + class State(proto.Enum): + r"""State of the job.""" + STATE_UNSPECIFIED = 0 + ENABLED = 1 + PAUSED = 2 + DISABLED = 3 + UPDATE_FAILED = 4 + INITIALIZING = 5 + DELETING = 6 + + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) + state = proto.Field(proto.ENUM, number=4, enum=State,) + cron_schedule = proto.Field(proto.STRING, number=5,) + time_zone = proto.Field(proto.STRING, number=6,) + create_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp,) + execution_template = proto.Field( + proto.MESSAGE, number=9, message=execution.ExecutionTemplate, + ) + recent_executions = proto.RepeatedField( + proto.MESSAGE, number=10, message=execution.Execution, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/notebooks_v1/types/service.py b/google/cloud/notebooks_v1/types/service.py new file mode 100644 index 0000000..3095f89 --- /dev/null +++ b/google/cloud/notebooks_v1/types/service.py @@ -0,0 +1,751 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.cloud.notebooks_v1.types import environment as gcn_environment +from google.cloud.notebooks_v1.types import execution as gcn_execution +from google.cloud.notebooks_v1.types import instance as gcn_instance +from google.cloud.notebooks_v1.types import instance_config +from google.cloud.notebooks_v1.types import schedule as gcn_schedule +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.notebooks.v1", + manifest={ + "OperationMetadata", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "RegisterInstanceRequest", + "SetInstanceAcceleratorRequest", + "SetInstanceMachineTypeRequest", + "UpdateInstanceConfigRequest", + "SetInstanceLabelsRequest", + "UpdateShieldedInstanceConfigRequest", + "DeleteInstanceRequest", + "StartInstanceRequest", + "StopInstanceRequest", + "ResetInstanceRequest", + "ReportInstanceInfoRequest", + "IsInstanceUpgradeableRequest", + "IsInstanceUpgradeableResponse", + "GetInstanceHealthRequest", + "GetInstanceHealthResponse", + "UpgradeInstanceRequest", + "RollbackInstanceRequest", + "UpgradeInstanceInternalRequest", + "ListEnvironmentsRequest", + "ListEnvironmentsResponse", + "GetEnvironmentRequest", + "CreateEnvironmentRequest", + "DeleteEnvironmentRequest", + "ListSchedulesRequest", + "ListSchedulesResponse", + "GetScheduleRequest", + "DeleteScheduleRequest", + "CreateScheduleRequest", + "TriggerScheduleRequest", + "ListExecutionsRequest", + "ListExecutionsResponse", + "GetExecutionRequest", + "DeleteExecutionRequest", + "CreateExecutionRequest", + }, +) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation was created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation finished running. 
+ target (str): + Server-defined resource path for the target + of the operation. + verb (str): + Name of the verb executed by the operation. + status_message (str): + Human-readable status of the operation, if + any. + requested_cancellation (bool): + Identifies whether the user has requested cancellation of + the operation. Operations that have successfully been + cancelled have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + API version used to start the operation. + endpoint (str): + API endpoint name of this operation. + """ + + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + target = proto.Field(proto.STRING, number=3,) + verb = proto.Field(proto.STRING, number=4,) + status_message = proto.Field(proto.STRING, number=5,) + requested_cancellation = proto.Field(proto.BOOL, number=6,) + api_version = proto.Field(proto.STRING, number=7,) + endpoint = proto.Field(proto.STRING, number=8,) + + +class ListInstancesRequest(proto.Message): + r"""Request for listing notebook instances. + Attributes: + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + page_size (int): + Maximum return size of the list call. + page_token (str): + A previous returned page token that can be + used to continue listing from the last result. + """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + + +class ListInstancesResponse(proto.Message): + r"""Response for listing notebook instances. + Attributes: + instances (Sequence[google.cloud.notebooks_v1.types.Instance]): + A list of returned instances. + next_page_token (str): + Page token that can be used to continue + listing from the last result in the next list + call. 
+ unreachable (Sequence[str]): + Locations that could not be reached. For example, + ['us-west1-a', 'us-central1-b']. A ListInstancesResponse + will only contain either instances or unreachables, + """ + + @property + def raw_page(self): + return self + + instances = proto.RepeatedField( + proto.MESSAGE, number=1, message=gcn_instance.Instance, + ) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) + + +class GetInstanceRequest(proto.Message): + r"""Request for getting a notebook instance. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class CreateInstanceRequest(proto.Message): + r"""Request for creating a notebook instance. + Attributes: + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + instance_id (str): + Required. User-defined unique ID of this + instance. + instance (google.cloud.notebooks_v1.types.Instance): + Required. The instance to be created. + """ + + parent = proto.Field(proto.STRING, number=1,) + instance_id = proto.Field(proto.STRING, number=2,) + instance = proto.Field(proto.MESSAGE, number=3, message=gcn_instance.Instance,) + + +class RegisterInstanceRequest(proto.Message): + r"""Request for registering a notebook instance. + Attributes: + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + instance_id (str): + Required. User defined unique ID of this instance. The + ``instance_id`` must be 1 to 63 characters long and contain + only lowercase letters, numeric characters, and dashes. The + first character must be a lowercase letter and the last + character cannot be a dash. 
+ """ + + parent = proto.Field(proto.STRING, number=1,) + instance_id = proto.Field(proto.STRING, number=2,) + + +class SetInstanceAcceleratorRequest(proto.Message): + r"""Request for setting instance accelerator. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + type_ (google.cloud.notebooks_v1.types.Instance.AcceleratorType): + Required. Type of this accelerator. + core_count (int): + Required. Count of cores of this accelerator. Note that not + all combinations of ``type`` and ``core_count`` are valid. + Check `GPUs on Compute + Engine `__ + to find a valid combination. TPUs are not supported. + """ + + name = proto.Field(proto.STRING, number=1,) + type_ = proto.Field( + proto.ENUM, number=2, enum=gcn_instance.Instance.AcceleratorType, + ) + core_count = proto.Field(proto.INT64, number=3,) + + +class SetInstanceMachineTypeRequest(proto.Message): + r"""Request for setting instance machine type. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + machine_type (str): + Required. The `Compute Engine machine + type `__. + """ + + name = proto.Field(proto.STRING, number=1,) + machine_type = proto.Field(proto.STRING, number=2,) + + +class UpdateInstanceConfigRequest(proto.Message): + r"""Request for updating instance configurations. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + config (google.cloud.notebooks_v1.types.InstanceConfig): + The instance configurations to be updated. + """ + + name = proto.Field(proto.STRING, number=1,) + config = proto.Field( + proto.MESSAGE, number=2, message=instance_config.InstanceConfig, + ) + + +class SetInstanceLabelsRequest(proto.Message): + r"""Request for setting instance labels. + Attributes: + name (str): + Required. 
Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + labels (Sequence[google.cloud.notebooks_v1.types.SetInstanceLabelsRequest.LabelsEntry]): + Labels to apply to this instance. + These can be later modified by the setLabels + method + """ + + name = proto.Field(proto.STRING, number=1,) + labels = proto.MapField(proto.STRING, proto.STRING, number=2,) + + +class UpdateShieldedInstanceConfigRequest(proto.Message): + r"""Request for updating the Shielded Instance config for a + notebook instance. You can only use this method on a stopped + instance + + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + shielded_instance_config (google.cloud.notebooks_v1.types.Instance.ShieldedInstanceConfig): + ShieldedInstance configuration to be updated. + """ + + name = proto.Field(proto.STRING, number=1,) + shielded_instance_config = proto.Field( + proto.MESSAGE, number=2, message=gcn_instance.Instance.ShieldedInstanceConfig, + ) + + +class DeleteInstanceRequest(proto.Message): + r"""Request for deleting a notebook instance. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class StartInstanceRequest(proto.Message): + r"""Request for starting a notebook instance + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class StopInstanceRequest(proto.Message): + r"""Request for stopping a notebook instance + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class ResetInstanceRequest(proto.Message): + r"""Request for reseting a notebook instance + Attributes: + name (str): + Required. 
Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class ReportInstanceInfoRequest(proto.Message): + r"""Request for notebook instances to report information to + Notebooks API. + + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + vm_id (str): + Required. The VM hardware token for + authenticating the VM. + https://cloud.google.com/compute/docs/instances/verifying- + instance-identity + metadata (Sequence[google.cloud.notebooks_v1.types.ReportInstanceInfoRequest.MetadataEntry]): + The metadata reported to Notebooks API. This + will be merged to the instance metadata store + """ + + name = proto.Field(proto.STRING, number=1,) + vm_id = proto.Field(proto.STRING, number=2,) + metadata = proto.MapField(proto.STRING, proto.STRING, number=3,) + + +class IsInstanceUpgradeableRequest(proto.Message): + r"""Request for checking if a notebook instance is upgradeable. + Attributes: + notebook_instance (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + """ + + notebook_instance = proto.Field(proto.STRING, number=1,) + + +class IsInstanceUpgradeableResponse(proto.Message): + r"""Response for checking if a notebook instance is upgradeable. + Attributes: + upgradeable (bool): + If an instance is upgradeable. + upgrade_version (str): + The version this instance will be upgraded to + if calling the upgrade endpoint. This field will + only be populated if field upgradeable is true. + upgrade_info (str): + Additional information about upgrade. + upgrade_image (str): + The new image self link this instance will be + upgraded to if calling the upgrade endpoint. + This field will only be populated if field + upgradeable is true. 
+ """ + + upgradeable = proto.Field(proto.BOOL, number=1,) + upgrade_version = proto.Field(proto.STRING, number=2,) + upgrade_info = proto.Field(proto.STRING, number=3,) + upgrade_image = proto.Field(proto.STRING, number=4,) + + +class GetInstanceHealthRequest(proto.Message): + r"""Request for checking if a notebook instance is healthy. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class GetInstanceHealthResponse(proto.Message): + r"""Response for checking if a notebook instance is healthy. + Attributes: + health_state (google.cloud.notebooks_v1.types.GetInstanceHealthResponse.HealthState): + Output only. Runtime health_state. + health_info (Sequence[google.cloud.notebooks_v1.types.GetInstanceHealthResponse.HealthInfoEntry]): + Output only. Additional information about instance health. + Example: healthInfo": { "docker_proxy_agent_status": "1", + "docker_status": "1", "jupyterlab_api_status": "-1", + "jupyterlab_status": "-1", "updated": "2020-10-18 + 09:40:03.573409" } + """ + + class HealthState(proto.Enum): + r"""If an instance is healthy or not.""" + HEALTH_STATE_UNSPECIFIED = 0 + HEALTHY = 1 + UNHEALTHY = 2 + AGENT_NOT_INSTALLED = 3 + AGENT_NOT_RUNNING = 4 + + health_state = proto.Field(proto.ENUM, number=1, enum=HealthState,) + health_info = proto.MapField(proto.STRING, proto.STRING, number=2,) + + +class UpgradeInstanceRequest(proto.Message): + r"""Request for upgrading a notebook instance + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class RollbackInstanceRequest(proto.Message): + r"""Request for rollbacking a notebook instance + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + target_snapshot (str): + Required. 
The snapshot for rollback. + Example: "projects/test- + project/global/snapshots/krwlzipynril". + """ + + name = proto.Field(proto.STRING, number=1,) + target_snapshot = proto.Field(proto.STRING, number=2,) + + +class UpgradeInstanceInternalRequest(proto.Message): + r"""Request for upgrading a notebook instance from within the VM + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/instances/{instance_id}`` + vm_id (str): + Required. The VM hardware token for + authenticating the VM. + https://cloud.google.com/compute/docs/instances/verifying- + instance-identity + """ + + name = proto.Field(proto.STRING, number=1,) + vm_id = proto.Field(proto.STRING, number=2,) + + +class ListEnvironmentsRequest(proto.Message): + r"""Request for listing environments. + Attributes: + parent (str): + Required. Format: + ``projects/{project_id}/locations/{location}`` + page_size (int): + Maximum return size of the list call. + page_token (str): + A previous returned page token that can be + used to continue listing from the last result. + """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + + +class ListEnvironmentsResponse(proto.Message): + r"""Response for listing environments. + Attributes: + environments (Sequence[google.cloud.notebooks_v1.types.Environment]): + A list of returned environments. + next_page_token (str): + A page token that can be used to continue + listing from the last result in the next list + call. + unreachable (Sequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + environments = proto.RepeatedField( + proto.MESSAGE, number=1, message=gcn_environment.Environment, + ) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) + + +class GetEnvironmentRequest(proto.Message): + r"""Request for getting a notebook environment. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/environments/{environment_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class CreateEnvironmentRequest(proto.Message): + r"""Request for creating a notebook environment. + Attributes: + parent (str): + Required. Format: + ``projects/{project_id}/locations/{location}`` + environment_id (str): + Required. User-defined unique ID of this environment. The + ``environment_id`` must be 1 to 63 characters long and + contain only lowercase letters, numeric characters, and + dashes. The first character must be a lowercase letter and + the last character cannot be a dash. + environment (google.cloud.notebooks_v1.types.Environment): + Required. The environment to be created. + """ + + parent = proto.Field(proto.STRING, number=1,) + environment_id = proto.Field(proto.STRING, number=2,) + environment = proto.Field( + proto.MESSAGE, number=3, message=gcn_environment.Environment, + ) + + +class DeleteEnvironmentRequest(proto.Message): + r"""Request for deleting a notebook environment. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/environments/{environment_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class ListSchedulesRequest(proto.Message): + r"""Request for listing scheduled notebook job. + Attributes: + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + page_size (int): + Maximum return size of the list call. 
+ page_token (str): + A previous returned page token that can be + used to continue listing from the last result. + filter (str): + Filter applied to resulting schedules. + order_by (str): + Field to order results by. + """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) + + +class ListSchedulesResponse(proto.Message): + r"""Response for listing scheduled notebook job. + Attributes: + schedules (Sequence[google.cloud.notebooks_v1.types.Schedule]): + A list of returned instances. + next_page_token (str): + Page token that can be used to continue + listing from the last result in the next list + call. + unreachable (Sequence[str]): + Schedules that could not be reached. For example, + ['projects/{project_id}/location/{location}/schedules/monthly_digest', + 'projects/{project_id}/location/{location}/schedules/weekly_sentiment']. + """ + + @property + def raw_page(self): + return self + + schedules = proto.RepeatedField( + proto.MESSAGE, number=1, message=gcn_schedule.Schedule, + ) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) + + +class GetScheduleRequest(proto.Message): + r"""Request for getting scheduled notebook. + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/schedules/{schedule_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class DeleteScheduleRequest(proto.Message): + r"""Request for deleting an Schedule + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/schedules/{schedule_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class CreateScheduleRequest(proto.Message): + r"""Request for created scheduled notebooks + Attributes: + parent (str): + Required. 
Format: + ``parent=projects/{project_id}/locations/{location}`` + schedule_id (str): + Required. User-defined unique ID of this + schedule. + schedule (google.cloud.notebooks_v1.types.Schedule): + Required. The schedule to be created. + """ + + parent = proto.Field(proto.STRING, number=1,) + schedule_id = proto.Field(proto.STRING, number=2,) + schedule = proto.Field(proto.MESSAGE, number=3, message=gcn_schedule.Schedule,) + + +class TriggerScheduleRequest(proto.Message): + r"""Request for created scheduled notebooks + Attributes: + name (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}/schedules/{schedule_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class ListExecutionsRequest(proto.Message): + r"""Request for listing scheduled notebook executions. + Attributes: + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + page_size (int): + Maximum return size of the list call. + page_token (str): + A previous returned page token that can be + used to continue listing from the last result. + filter (str): + Filter applied to resulting executions. Currently only + supports filtering executions by a specified schedule_id. + Format: "schedule_id=". + order_by (str): + Sort by field. + """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) + + +class ListExecutionsResponse(proto.Message): + r"""Response for listing scheduled notebook executions + Attributes: + executions (Sequence[google.cloud.notebooks_v1.types.Execution]): + A list of returned instances. + next_page_token (str): + Page token that can be used to continue + listing from the last result in the next list + call. + unreachable (Sequence[str]): + Executions IDs that could not be reached. 
For example, + ['projects/{project_id}/location/{location}/executions/imagenet_test1', + 'projects/{project_id}/location/{location}/executions/classifier_train1']. + """ + + @property + def raw_page(self): + return self + + executions = proto.RepeatedField( + proto.MESSAGE, number=1, message=gcn_execution.Execution, + ) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) + + +class GetExecutionRequest(proto.Message): + r"""Request for getting scheduled notebook execution + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/executions/{execution_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class DeleteExecutionRequest(proto.Message): + r"""Request for deleting a scheduled notebook execution + Attributes: + name (str): + Required. Format: + ``projects/{project_id}/locations/{location}/executions/{execution_id}`` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class CreateExecutionRequest(proto.Message): + r"""Request to create notebook execution + Attributes: + parent (str): + Required. Format: + ``parent=projects/{project_id}/locations/{location}`` + execution_id (str): + Required. User-defined unique ID of this + execution. + execution (google.cloud.notebooks_v1.types.Execution): + Required. The execution to be created. + """ + + parent = proto.Field(proto.STRING, number=1,) + execution_id = proto.Field(proto.STRING, number=2,) + execution = proto.Field(proto.MESSAGE, number=3, message=gcn_execution.Execution,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/notebooks_v1beta1/types/instance.py b/google/cloud/notebooks_v1beta1/types/instance.py index 3f862be..13eb275 100644 --- a/google/cloud/notebooks_v1beta1/types/instance.py +++ b/google/cloud/notebooks_v1beta1/types/instance.py @@ -26,6 +26,7 @@ class Instance(proto.Message): r"""The definition of a notebook instance. 
+ Attributes: name (str): Output only. The name of this notebook instance. Format: diff --git a/owlbot.py b/owlbot.py index 6c30c52..d0c5161 100644 --- a/owlbot.py +++ b/owlbot.py @@ -21,9 +21,80 @@ common = gcp.CommonTemplates() -default_version = "v1beta1" +default_version = "v1" for library in s.get_staging_dirs(default_version): + + # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 + s.replace(library / f"google/cloud/notebooks_{library.name}/types/instance.py", + r""". + Attributes:""", + r""".\n + Attributes:""", + ) + + # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 + s.replace(library / f"google/cloud/notebooks_{library.name}/types/instance.py", + r""". + Attributes:""", + r""".\n + Attributes:""", + ) + + # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 + s.replace(library / f"google/cloud/notebooks_{library.name}/types/instance.py", + r""". + Attributes:""", + r""".\n + Attributes:""", + ) + + + # Fix docstring formatting issue. Fix proposed upstream in cl/393820869. + s.replace(library / f"google/cloud/notebooks_{library.name}/types/instance.py", + """values: + NVME + SCSI""", + """values: + + * NVME + * SCSI""" + ) + + # Fix docstring formatting issue. Fix proposed upstream in cl/393820869. + s.replace(library / f"google/cloud/notebooks_{library.name}/types/runtime.py", + """values: + NVME + SCSI""", + """values: + + * NVME + * SCSI""" + ) + + # Fix docstring formatting issue. Fix proposed upstream in cl/393820869. + s.replace(library / f"google/cloud/notebooks_{library.name}/types/instance.py", + """values: + PERSISTENT + SCRATCH""", + """values: + + * PERSISTENT + * SCRATCH""", + ) + + # Fix docstring formatting issue. Fix proposed upstream in cl/393820869. 
+ s.replace(library / f"google/cloud/notebooks_{library.name}/types/runtime.py", + """values: + PERSISTENT + SCRATCH""", + """values: + + * PERSISTENT + * SCRATCH""" + ) + + s.move(library, excludes=["scripts/fixup*.py", "setup.py", "README.rst", "docs/index.rst"]) s.remove_staging_dirs() diff --git a/tests/unit/gapic/notebooks_v1/__init__.py b/tests/unit/gapic/notebooks_v1/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/gapic/notebooks_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/notebooks_v1/test_managed_notebook_service.py b/tests/unit/gapic/notebooks_v1/test_managed_notebook_service.py new file mode 100644 index 0000000..7b89c19 --- /dev/null +++ b/tests/unit/gapic/notebooks_v1/test_managed_notebook_service.py @@ -0,0 +1,3200 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.notebooks_v1.services.managed_notebook_service import ( + ManagedNotebookServiceAsyncClient, +) +from google.cloud.notebooks_v1.services.managed_notebook_service import ( + ManagedNotebookServiceClient, +) +from google.cloud.notebooks_v1.services.managed_notebook_service import pagers +from google.cloud.notebooks_v1.services.managed_notebook_service import transports +from google.cloud.notebooks_v1.services.managed_notebook_service.transports.base import ( + _GOOGLE_AUTH_VERSION, +) +from google.cloud.notebooks_v1.types import environment +from google.cloud.notebooks_v1.types import event +from google.cloud.notebooks_v1.types import managed_service +from google.cloud.notebooks_v1.types import runtime +from google.cloud.notebooks_v1.types import runtime as gcn_runtime +from google.cloud.notebooks_v1.types import service +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the 
"greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ManagedNotebookServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ManagedNotebookServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ManagedNotebookServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ManagedNotebookServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ManagedNotebookServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ManagedNotebookServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [ManagedNotebookServiceClient, ManagedNotebookServiceAsyncClient,] +) +def 
test_managed_notebook_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "notebooks.googleapis.com:443" + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ManagedNotebookServiceGrpcTransport, "grpc"), + (transports.ManagedNotebookServiceGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_managed_notebook_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class", [ManagedNotebookServiceClient, ManagedNotebookServiceAsyncClient,] +) +def test_managed_notebook_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = 
client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "notebooks.googleapis.com:443" + + +def test_managed_notebook_service_client_get_transport_class(): + transport = ManagedNotebookServiceClient.get_transport_class() + available_transports = [ + transports.ManagedNotebookServiceGrpcTransport, + ] + assert transport in available_transports + + transport = ManagedNotebookServiceClient.get_transport_class("grpc") + assert transport == transports.ManagedNotebookServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ManagedNotebookServiceClient, + transports.ManagedNotebookServiceGrpcTransport, + "grpc", + ), + ( + ManagedNotebookServiceAsyncClient, + transports.ManagedNotebookServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + ManagedNotebookServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ManagedNotebookServiceClient), +) +@mock.patch.object( + ManagedNotebookServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ManagedNotebookServiceAsyncClient), +) +def test_managed_notebook_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ManagedNotebookServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ManagedNotebookServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ManagedNotebookServiceClient, + transports.ManagedNotebookServiceGrpcTransport, + "grpc", + "true", + ), + ( + ManagedNotebookServiceAsyncClient, + transports.ManagedNotebookServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ManagedNotebookServiceClient, + transports.ManagedNotebookServiceGrpcTransport, + "grpc", + "false", + ), + ( + ManagedNotebookServiceAsyncClient, + transports.ManagedNotebookServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + ManagedNotebookServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ManagedNotebookServiceClient), +) +@mock.patch.object( + ManagedNotebookServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ManagedNotebookServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_managed_notebook_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ManagedNotebookServiceClient, + transports.ManagedNotebookServiceGrpcTransport, + "grpc", + ), + ( + ManagedNotebookServiceAsyncClient, + transports.ManagedNotebookServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_managed_notebook_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ManagedNotebookServiceClient, + transports.ManagedNotebookServiceGrpcTransport, + "grpc", + ), + ( + ManagedNotebookServiceAsyncClient, + transports.ManagedNotebookServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_managed_notebook_service_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_managed_notebook_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.notebooks_v1.services.managed_notebook_service.transports.ManagedNotebookServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ManagedNotebookServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + 
client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_list_runtimes( + transport: str = "grpc", request_type=managed_service.ListRuntimesRequest +): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_runtimes), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = managed_service.ListRuntimesResponse( + next_page_token="next_page_token_value", unreachable=["unreachable_value"], + ) + response = client.list_runtimes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.ListRuntimesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRuntimesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_runtimes_from_dict(): + test_list_runtimes(request_type=dict) + + +def test_list_runtimes_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_runtimes), "__call__") as call: + client.list_runtimes() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.ListRuntimesRequest() + + +@pytest.mark.asyncio +async def test_list_runtimes_async( + transport: str = "grpc_asyncio", request_type=managed_service.ListRuntimesRequest +): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_runtimes), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_service.ListRuntimesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_runtimes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.ListRuntimesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRuntimesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_runtimes_async_from_dict(): + await test_list_runtimes_async(request_type=dict) + + +def test_list_runtimes_field_headers(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = managed_service.ListRuntimesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_runtimes), "__call__") as call: + call.return_value = managed_service.ListRuntimesResponse() + client.list_runtimes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_runtimes_field_headers_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.ListRuntimesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_runtimes), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_service.ListRuntimesResponse() + ) + await client.list_runtimes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_runtimes_flattened(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_runtimes), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = managed_service.ListRuntimesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_runtimes(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +def test_list_runtimes_flattened_error(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_runtimes( + managed_service.ListRuntimesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_runtimes_flattened_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_runtimes), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = managed_service.ListRuntimesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + managed_service.ListRuntimesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_runtimes(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_runtimes_flattened_error_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_runtimes( + managed_service.ListRuntimesRequest(), parent="parent_value", + ) + + +def test_list_runtimes_pager(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_runtimes), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + managed_service.ListRuntimesResponse( + runtimes=[runtime.Runtime(), runtime.Runtime(), runtime.Runtime(),], + next_page_token="abc", + ), + managed_service.ListRuntimesResponse(runtimes=[], next_page_token="def",), + managed_service.ListRuntimesResponse( + runtimes=[runtime.Runtime(),], next_page_token="ghi", + ), + managed_service.ListRuntimesResponse( + runtimes=[runtime.Runtime(), runtime.Runtime(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_runtimes(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, runtime.Runtime) for i in results) + + +def test_list_runtimes_pages(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_runtimes), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + managed_service.ListRuntimesResponse( + runtimes=[runtime.Runtime(), runtime.Runtime(), runtime.Runtime(),], + next_page_token="abc", + ), + managed_service.ListRuntimesResponse(runtimes=[], next_page_token="def",), + managed_service.ListRuntimesResponse( + runtimes=[runtime.Runtime(),], next_page_token="ghi", + ), + managed_service.ListRuntimesResponse( + runtimes=[runtime.Runtime(), runtime.Runtime(),], + ), + RuntimeError, + ) + pages = list(client.list_runtimes(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_runtimes_async_pager(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_runtimes), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + managed_service.ListRuntimesResponse( + runtimes=[runtime.Runtime(), runtime.Runtime(), runtime.Runtime(),], + next_page_token="abc", + ), + managed_service.ListRuntimesResponse(runtimes=[], next_page_token="def",), + managed_service.ListRuntimesResponse( + runtimes=[runtime.Runtime(),], next_page_token="ghi", + ), + managed_service.ListRuntimesResponse( + runtimes=[runtime.Runtime(), runtime.Runtime(),], + ), + RuntimeError, + ) + async_pager = await client.list_runtimes(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, runtime.Runtime) for i in responses) + + +@pytest.mark.asyncio +async def test_list_runtimes_async_pages(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_runtimes), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + managed_service.ListRuntimesResponse( + runtimes=[runtime.Runtime(), runtime.Runtime(), runtime.Runtime(),], + next_page_token="abc", + ), + managed_service.ListRuntimesResponse(runtimes=[], next_page_token="def",), + managed_service.ListRuntimesResponse( + runtimes=[runtime.Runtime(),], next_page_token="ghi", + ), + managed_service.ListRuntimesResponse( + runtimes=[runtime.Runtime(), runtime.Runtime(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_runtimes(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_runtime( + transport: str = "grpc", request_type=managed_service.GetRuntimeRequest +): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = runtime.Runtime( + name="name_value", + state=runtime.Runtime.State.STARTING, + health_state=runtime.Runtime.HealthState.HEALTHY, + virtual_machine=runtime.VirtualMachine(instance_name="instance_name_value"), + ) + response = client.get_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.GetRuntimeRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, runtime.Runtime) + assert response.name == "name_value" + assert response.state == runtime.Runtime.State.STARTING + assert response.health_state == runtime.Runtime.HealthState.HEALTHY + + +def test_get_runtime_from_dict(): + test_get_runtime(request_type=dict) + + +def test_get_runtime_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_runtime), "__call__") as call: + client.get_runtime() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.GetRuntimeRequest() + + +@pytest.mark.asyncio +async def test_get_runtime_async( + transport: str = "grpc_asyncio", request_type=managed_service.GetRuntimeRequest +): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + runtime.Runtime( + name="name_value", + state=runtime.Runtime.State.STARTING, + health_state=runtime.Runtime.HealthState.HEALTHY, + ) + ) + response = await client.get_runtime(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.GetRuntimeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, runtime.Runtime) + assert response.name == "name_value" + assert response.state == runtime.Runtime.State.STARTING + assert response.health_state == runtime.Runtime.HealthState.HEALTHY + + +@pytest.mark.asyncio +async def test_get_runtime_async_from_dict(): + await test_get_runtime_async(request_type=dict) + + +def test_get_runtime_field_headers(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.GetRuntimeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_runtime), "__call__") as call: + call.return_value = runtime.Runtime() + client.get_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_runtime_field_headers_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.GetRuntimeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_runtime), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(runtime.Runtime()) + await client.get_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_runtime_flattened(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = runtime.Runtime() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_runtime(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_get_runtime_flattened_error(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_runtime( + managed_service.GetRuntimeRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_runtime_flattened_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_runtime), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = runtime.Runtime() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(runtime.Runtime()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_runtime(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_runtime_flattened_error_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_runtime( + managed_service.GetRuntimeRequest(), name="name_value", + ) + + +def test_create_runtime( + transport: str = "grpc", request_type=managed_service.CreateRuntimeRequest +): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.CreateRuntimeRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_create_runtime_from_dict(): + test_create_runtime(request_type=dict) + + +def test_create_runtime_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_runtime), "__call__") as call: + client.create_runtime() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.CreateRuntimeRequest() + + +@pytest.mark.asyncio +async def test_create_runtime_async( + transport: str = "grpc_asyncio", request_type=managed_service.CreateRuntimeRequest +): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.CreateRuntimeRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_runtime_async_from_dict(): + await test_create_runtime_async(request_type=dict) + + +def test_create_runtime_field_headers(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.CreateRuntimeRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_runtime), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_runtime_field_headers_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.CreateRuntimeRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_runtime), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_runtime(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_runtime_flattened(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_runtime( + parent="parent_value", + runtime_id="runtime_id_value", + runtime=gcn_runtime.Runtime(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].runtime_id == "runtime_id_value" + assert args[0].runtime == gcn_runtime.Runtime(name="name_value") + + +def test_create_runtime_flattened_error(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_runtime( + managed_service.CreateRuntimeRequest(), + parent="parent_value", + runtime_id="runtime_id_value", + runtime=gcn_runtime.Runtime(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_runtime_flattened_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_runtime( + parent="parent_value", + runtime_id="runtime_id_value", + runtime=gcn_runtime.Runtime(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].runtime_id == "runtime_id_value" + assert args[0].runtime == gcn_runtime.Runtime(name="name_value") + + +@pytest.mark.asyncio +async def test_create_runtime_flattened_error_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_runtime( + managed_service.CreateRuntimeRequest(), + parent="parent_value", + runtime_id="runtime_id_value", + runtime=gcn_runtime.Runtime(name="name_value"), + ) + + +def test_delete_runtime( + transport: str = "grpc", request_type=managed_service.DeleteRuntimeRequest +): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.DeleteRuntimeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_runtime_from_dict(): + test_delete_runtime(request_type=dict) + + +def test_delete_runtime_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_runtime), "__call__") as call: + client.delete_runtime() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.DeleteRuntimeRequest() + + +@pytest.mark.asyncio +async def test_delete_runtime_async( + transport: str = "grpc_asyncio", request_type=managed_service.DeleteRuntimeRequest +): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_runtime), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.DeleteRuntimeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_runtime_async_from_dict(): + await test_delete_runtime_async(request_type=dict) + + +def test_delete_runtime_field_headers(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.DeleteRuntimeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_runtime), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_runtime_field_headers_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.DeleteRuntimeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_runtime), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_runtime_flattened(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_runtime(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_delete_runtime_flattened_error(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_runtime( + managed_service.DeleteRuntimeRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_runtime_flattened_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_runtime(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_runtime_flattened_error_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_runtime( + managed_service.DeleteRuntimeRequest(), name="name_value", + ) + + +def test_start_runtime( + transport: str = "grpc", request_type=managed_service.StartRuntimeRequest +): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.start_runtime(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.StartRuntimeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_start_runtime_from_dict(): + test_start_runtime(request_type=dict) + + +def test_start_runtime_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_runtime), "__call__") as call: + client.start_runtime() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.StartRuntimeRequest() + + +@pytest.mark.asyncio +async def test_start_runtime_async( + transport: str = "grpc_asyncio", request_type=managed_service.StartRuntimeRequest +): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.start_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.StartRuntimeRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_start_runtime_async_from_dict(): + await test_start_runtime_async(request_type=dict) + + +def test_start_runtime_field_headers(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.StartRuntimeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_runtime), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.start_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_start_runtime_field_headers_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.StartRuntimeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_runtime), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.start_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_start_runtime_flattened(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.start_runtime(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_start_runtime_flattened_error(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.start_runtime( + managed_service.StartRuntimeRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_start_runtime_flattened_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.start_runtime(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_start_runtime_flattened_error_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.start_runtime( + managed_service.StartRuntimeRequest(), name="name_value", + ) + + +def test_stop_runtime( + transport: str = "grpc", request_type=managed_service.StopRuntimeRequest +): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.stop_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.StopRuntimeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_stop_runtime_from_dict(): + test_stop_runtime(request_type=dict) + + +def test_stop_runtime_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_runtime), "__call__") as call: + client.stop_runtime() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.StopRuntimeRequest() + + +@pytest.mark.asyncio +async def test_stop_runtime_async( + transport: str = "grpc_asyncio", request_type=managed_service.StopRuntimeRequest +): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.stop_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.StopRuntimeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_stop_runtime_async_from_dict(): + await test_stop_runtime_async(request_type=dict) + + +def test_stop_runtime_field_headers(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = managed_service.StopRuntimeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_runtime), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.stop_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_stop_runtime_field_headers_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.StopRuntimeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_runtime), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.stop_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_stop_runtime_flattened(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_runtime), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.stop_runtime(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_stop_runtime_flattened_error(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.stop_runtime( + managed_service.StopRuntimeRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_stop_runtime_flattened_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.stop_runtime(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_stop_runtime_flattened_error_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.stop_runtime( + managed_service.StopRuntimeRequest(), name="name_value", + ) + + +def test_switch_runtime( + transport: str = "grpc", request_type=managed_service.SwitchRuntimeRequest +): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.switch_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.switch_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.SwitchRuntimeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_switch_runtime_from_dict(): + test_switch_runtime(request_type=dict) + + +def test_switch_runtime_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.switch_runtime), "__call__") as call: + client.switch_runtime() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.SwitchRuntimeRequest() + + +@pytest.mark.asyncio +async def test_switch_runtime_async( + transport: str = "grpc_asyncio", request_type=managed_service.SwitchRuntimeRequest +): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.switch_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.switch_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.SwitchRuntimeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_switch_runtime_async_from_dict(): + await test_switch_runtime_async(request_type=dict) + + +def test_switch_runtime_field_headers(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.SwitchRuntimeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.switch_runtime), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.switch_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_switch_runtime_field_headers_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.SwitchRuntimeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.switch_runtime), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.switch_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_switch_runtime_flattened(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.switch_runtime), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.switch_runtime(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_switch_runtime_flattened_error(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.switch_runtime( + managed_service.SwitchRuntimeRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_switch_runtime_flattened_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.switch_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.switch_runtime(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_switch_runtime_flattened_error_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.switch_runtime( + managed_service.SwitchRuntimeRequest(), name="name_value", + ) + + +def test_reset_runtime( + transport: str = "grpc", request_type=managed_service.ResetRuntimeRequest +): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reset_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.reset_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.ResetRuntimeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_reset_runtime_from_dict(): + test_reset_runtime(request_type=dict) + + +def test_reset_runtime_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.reset_runtime), "__call__") as call: + client.reset_runtime() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.ResetRuntimeRequest() + + +@pytest.mark.asyncio +async def test_reset_runtime_async( + transport: str = "grpc_asyncio", request_type=managed_service.ResetRuntimeRequest +): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reset_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.reset_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.ResetRuntimeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_reset_runtime_async_from_dict(): + await test_reset_runtime_async(request_type=dict) + + +def test_reset_runtime_field_headers(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.ResetRuntimeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.reset_runtime), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.reset_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_reset_runtime_field_headers_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.ResetRuntimeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reset_runtime), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.reset_runtime(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_reset_runtime_flattened(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reset_runtime), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.reset_runtime(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_reset_runtime_flattened_error(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.reset_runtime( + managed_service.ResetRuntimeRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_reset_runtime_flattened_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reset_runtime), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.reset_runtime(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_reset_runtime_flattened_error_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.reset_runtime( + managed_service.ResetRuntimeRequest(), name="name_value", + ) + + +def test_report_runtime_event( + transport: str = "grpc", request_type=managed_service.ReportRuntimeEventRequest +): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_runtime_event), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.report_runtime_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.ReportRuntimeEventRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_report_runtime_event_from_dict(): + test_report_runtime_event(request_type=dict) + + +def test_report_runtime_event_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_runtime_event), "__call__" + ) as call: + client.report_runtime_event() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.ReportRuntimeEventRequest() + + +@pytest.mark.asyncio +async def test_report_runtime_event_async( + transport: str = "grpc_asyncio", + request_type=managed_service.ReportRuntimeEventRequest, +): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_runtime_event), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.report_runtime_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == managed_service.ReportRuntimeEventRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_report_runtime_event_async_from_dict(): + await test_report_runtime_event_async(request_type=dict) + + +def test_report_runtime_event_field_headers(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = managed_service.ReportRuntimeEventRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_runtime_event), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.report_runtime_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_report_runtime_event_field_headers_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = managed_service.ReportRuntimeEventRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_runtime_event), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.report_runtime_event(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_report_runtime_event_flattened(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.report_runtime_event), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.report_runtime_event(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_report_runtime_event_flattened_error(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.report_runtime_event( + managed_service.ReportRuntimeEventRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_report_runtime_event_flattened_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_runtime_event), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.report_runtime_event(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_report_runtime_event_flattened_error_async(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.report_runtime_event( + managed_service.ReportRuntimeEventRequest(), name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ManagedNotebookServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ManagedNotebookServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ManagedNotebookServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ManagedNotebookServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ManagedNotebookServiceClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ManagedNotebookServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ManagedNotebookServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ManagedNotebookServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ManagedNotebookServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ManagedNotebookServiceGrpcTransport, + transports.ManagedNotebookServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance(client.transport, transports.ManagedNotebookServiceGrpcTransport,) + + +def test_managed_notebook_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ManagedNotebookServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_managed_notebook_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.notebooks_v1.services.managed_notebook_service.transports.ManagedNotebookServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ManagedNotebookServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_runtimes", + "get_runtime", + "create_runtime", + "delete_runtime", + "start_runtime", + "stop_runtime", + "switch_runtime", + "reset_runtime", + "report_runtime_event", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +@requires_google_auth_gte_1_25_0 +def test_managed_notebook_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.notebooks_v1.services.managed_notebook_service.transports.ManagedNotebookServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ManagedNotebookServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_managed_notebook_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as 
load_creds, mock.patch( + "google.cloud.notebooks_v1.services.managed_notebook_service.transports.ManagedNotebookServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ManagedNotebookServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_managed_notebook_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.notebooks_v1.services.managed_notebook_service.transports.ManagedNotebookServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ManagedNotebookServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_managed_notebook_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ManagedNotebookServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_managed_notebook_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ManagedNotebookServiceClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ManagedNotebookServiceGrpcTransport, + transports.ManagedNotebookServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_managed_notebook_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ManagedNotebookServiceGrpcTransport, + transports.ManagedNotebookServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_managed_notebook_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ManagedNotebookServiceGrpcTransport, grpc_helpers), + (transports.ManagedNotebookServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_managed_notebook_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "notebooks.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="notebooks.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ManagedNotebookServiceGrpcTransport, + transports.ManagedNotebookServiceGrpcAsyncIOTransport, + ], +) +def test_managed_notebook_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_managed_notebook_service_host_no_port(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="notebooks.googleapis.com" + ), + ) + assert client.transport._host == "notebooks.googleapis.com:443" + + +def test_managed_notebook_service_host_with_port(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="notebooks.googleapis.com:8000" + ), + ) + assert client.transport._host == "notebooks.googleapis.com:8000" + + +def test_managed_notebook_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.ManagedNotebookServiceGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_managed_notebook_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ManagedNotebookServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ManagedNotebookServiceGrpcTransport, + transports.ManagedNotebookServiceGrpcAsyncIOTransport, + ], +) +def test_managed_notebook_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + 
grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ManagedNotebookServiceGrpcTransport, + transports.ManagedNotebookServiceGrpcAsyncIOTransport, + ], +) +def test_managed_notebook_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_managed_notebook_service_grpc_lro_client(): + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + transport = 
client.transport + + # Ensure that we have a api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_managed_notebook_service_grpc_lro_async_client(): + client = ManagedNotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_runtime_path(): + project = "squid" + location = "clam" + runtime = "whelk" + expected = "projects/{project}/locations/{location}/runtimes/{runtime}".format( + project=project, location=location, runtime=runtime, + ) + actual = ManagedNotebookServiceClient.runtime_path(project, location, runtime) + assert expected == actual + + +def test_parse_runtime_path(): + expected = { + "project": "octopus", + "location": "oyster", + "runtime": "nudibranch", + } + path = ManagedNotebookServiceClient.runtime_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ManagedNotebookServiceClient.parse_runtime_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ManagedNotebookServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = ManagedNotebookServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ManagedNotebookServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder,) + actual = ManagedNotebookServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = ManagedNotebookServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ManagedNotebookServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization,) + actual = ManagedNotebookServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = ManagedNotebookServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ManagedNotebookServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project,) + actual = ManagedNotebookServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = ManagedNotebookServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ManagedNotebookServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = ManagedNotebookServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = ManagedNotebookServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ManagedNotebookServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ManagedNotebookServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ManagedNotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ManagedNotebookServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ManagedNotebookServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/notebooks_v1/test_notebook_service.py b/tests/unit/gapic/notebooks_v1/test_notebook_service.py new file mode 100644 index 0000000..79c2d3a --- /dev/null +++ b/tests/unit/gapic/notebooks_v1/test_notebook_service.py @@ -0,0 +1,7666 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.notebooks_v1.services.notebook_service import ( + NotebookServiceAsyncClient, +) +from google.cloud.notebooks_v1.services.notebook_service import NotebookServiceClient +from google.cloud.notebooks_v1.services.notebook_service import pagers +from google.cloud.notebooks_v1.services.notebook_service import transports +from google.cloud.notebooks_v1.services.notebook_service.transports.base import ( + _GOOGLE_AUTH_VERSION, +) +from google.cloud.notebooks_v1.types import environment +from google.cloud.notebooks_v1.types import environment as gcn_environment +from google.cloud.notebooks_v1.types import execution +from google.cloud.notebooks_v1.types import execution as gcn_execution +from google.cloud.notebooks_v1.types import instance +from google.cloud.notebooks_v1.types import instance as gcn_instance +from google.cloud.notebooks_v1.types import instance_config +from google.cloud.notebooks_v1.types import schedule +from google.cloud.notebooks_v1.types import schedule as gcn_schedule +from google.cloud.notebooks_v1.types import service +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# 
through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert NotebookServiceClient._get_default_mtls_endpoint(None) is None + assert ( + NotebookServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + NotebookServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + NotebookServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + NotebookServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + NotebookServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [NotebookServiceClient, 
NotebookServiceAsyncClient,] +) +def test_notebook_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "notebooks.googleapis.com:443" + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.NotebookServiceGrpcTransport, "grpc"), + (transports.NotebookServiceGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_notebook_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class", [NotebookServiceClient, NotebookServiceAsyncClient,] +) +def test_notebook_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") 
+ assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "notebooks.googleapis.com:443" + + +def test_notebook_service_client_get_transport_class(): + transport = NotebookServiceClient.get_transport_class() + available_transports = [ + transports.NotebookServiceGrpcTransport, + ] + assert transport in available_transports + + transport = NotebookServiceClient.get_transport_class("grpc") + assert transport == transports.NotebookServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (NotebookServiceClient, transports.NotebookServiceGrpcTransport, "grpc"), + ( + NotebookServiceAsyncClient, + transports.NotebookServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + NotebookServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(NotebookServiceClient), +) +@mock.patch.object( + NotebookServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(NotebookServiceAsyncClient), +) +def test_notebook_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(NotebookServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(NotebookServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + NotebookServiceClient, + transports.NotebookServiceGrpcTransport, + "grpc", + "true", + ), + ( + NotebookServiceAsyncClient, + transports.NotebookServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + NotebookServiceClient, + transports.NotebookServiceGrpcTransport, + "grpc", + "false", + ), + ( + NotebookServiceAsyncClient, + transports.NotebookServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + NotebookServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(NotebookServiceClient), +) +@mock.patch.object( + NotebookServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(NotebookServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_notebook_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (NotebookServiceClient, transports.NotebookServiceGrpcTransport, "grpc"), + ( + NotebookServiceAsyncClient, + transports.NotebookServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_notebook_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (NotebookServiceClient, transports.NotebookServiceGrpcTransport, "grpc"), + ( + NotebookServiceAsyncClient, + transports.NotebookServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_notebook_service_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_notebook_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.notebooks_v1.services.notebook_service.transports.NotebookServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = NotebookServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_list_instances( + transport: str = "grpc", request_type=service.ListInstancesRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListInstancesResponse( + next_page_token="next_page_token_value", unreachable=["unreachable_value"], + ) + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_from_dict(): + test_list_instances(request_type=dict) + + +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListInstancesRequest() + + +@pytest.mark.asyncio +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=service.ListInstancesRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + +def test_list_instances_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListInstancesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = service.ListInstancesResponse() + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_instances_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListInstancesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse() + ) + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_instances_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListInstancesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instances(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +def test_list_instances_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + service.ListInstancesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_instances_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListInstancesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListInstancesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_instances(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_instances_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_instances( + service.ListInstancesRequest(), parent="parent_value", + ) + + +def test_list_instances_pager(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListInstancesResponse( + instances=[ + instance.Instance(), + instance.Instance(), + instance.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse(instances=[], next_page_token="def",), + service.ListInstancesResponse( + instances=[instance.Instance(),], next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[instance.Instance(), instance.Instance(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_instances(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, instance.Instance) for i in results) + + +def test_list_instances_pages(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListInstancesResponse( + instances=[ + instance.Instance(), + instance.Instance(), + instance.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse(instances=[], next_page_token="def",), + service.ListInstancesResponse( + instances=[instance.Instance(),], next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[instance.Instance(), instance.Instance(),], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListInstancesResponse( + instances=[ + instance.Instance(), + instance.Instance(), + instance.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse(instances=[], next_page_token="def",), + service.ListInstancesResponse( + instances=[instance.Instance(),], next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[instance.Instance(), instance.Instance(),], + ), + RuntimeError, + ) + async_pager = await client.list_instances(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, instance.Instance) for i in responses) + + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListInstancesResponse( + instances=[ + instance.Instance(), + instance.Instance(), + instance.Instance(), + ], + next_page_token="abc", + ), + service.ListInstancesResponse(instances=[], next_page_token="def",), + service.ListInstancesResponse( + instances=[instance.Instance(),], next_page_token="ghi", + ), + service.ListInstancesResponse( + instances=[instance.Instance(), instance.Instance(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_instances(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_instance(transport: str = "grpc", request_type=service.GetInstanceRequest): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = instance.Instance( + name="name_value", + post_startup_script="post_startup_script_value", + proxy_uri="proxy_uri_value", + instance_owners=["instance_owners_value"], + service_account="service_account_value", + service_account_scopes=["service_account_scopes_value"], + machine_type="machine_type_value", + state=instance.Instance.State.STARTING, + install_gpu_driver=True, + custom_gpu_driver_path="custom_gpu_driver_path_value", + boot_disk_type=instance.Instance.DiskType.PD_STANDARD, + boot_disk_size_gb=1792, + data_disk_type=instance.Instance.DiskType.PD_STANDARD, + data_disk_size_gb=1766, + no_remove_data_disk=True, + disk_encryption=instance.Instance.DiskEncryption.GMEK, + kms_key="kms_key_value", + no_public_ip=True, + no_proxy_access=True, + network="network_value", + subnet="subnet_value", + tags=["tags_value"], + nic_type=instance.Instance.NicType.VIRTIO_NET, + vm_image=environment.VmImage(project="project_value"), + ) + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, instance.Instance) + assert response.name == "name_value" + assert response.post_startup_script == "post_startup_script_value" + assert response.proxy_uri == "proxy_uri_value" + assert response.instance_owners == ["instance_owners_value"] + assert response.service_account == "service_account_value" + assert response.service_account_scopes == ["service_account_scopes_value"] + assert response.machine_type == "machine_type_value" + assert response.state == instance.Instance.State.STARTING + assert response.install_gpu_driver is True + assert response.custom_gpu_driver_path == "custom_gpu_driver_path_value" + assert response.boot_disk_type == instance.Instance.DiskType.PD_STANDARD + assert response.boot_disk_size_gb == 1792 + assert response.data_disk_type == instance.Instance.DiskType.PD_STANDARD + assert response.data_disk_size_gb == 1766 + assert response.no_remove_data_disk is True + assert response.disk_encryption == instance.Instance.DiskEncryption.GMEK + assert response.kms_key == "kms_key_value" + assert response.no_public_ip is True + assert response.no_proxy_access is True + assert response.network == "network_value" + assert response.subnet == "subnet_value" + assert response.tags == ["tags_value"] + assert response.nic_type == instance.Instance.NicType.VIRTIO_NET + + +def test_get_instance_from_dict(): + test_get_instance(request_type=dict) + + +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceRequest() + + +@pytest.mark.asyncio +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=service.GetInstanceRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instance.Instance( + name="name_value", + post_startup_script="post_startup_script_value", + proxy_uri="proxy_uri_value", + instance_owners=["instance_owners_value"], + service_account="service_account_value", + service_account_scopes=["service_account_scopes_value"], + machine_type="machine_type_value", + state=instance.Instance.State.STARTING, + install_gpu_driver=True, + custom_gpu_driver_path="custom_gpu_driver_path_value", + boot_disk_type=instance.Instance.DiskType.PD_STANDARD, + boot_disk_size_gb=1792, + data_disk_type=instance.Instance.DiskType.PD_STANDARD, + data_disk_size_gb=1766, + no_remove_data_disk=True, + disk_encryption=instance.Instance.DiskEncryption.GMEK, + kms_key="kms_key_value", + no_public_ip=True, + no_proxy_access=True, + network="network_value", + subnet="subnet_value", + tags=["tags_value"], + nic_type=instance.Instance.NicType.VIRTIO_NET, + ) + ) + response = await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, instance.Instance) + assert response.name == "name_value" + assert response.post_startup_script == "post_startup_script_value" + assert response.proxy_uri == "proxy_uri_value" + assert response.instance_owners == ["instance_owners_value"] + assert response.service_account == "service_account_value" + assert response.service_account_scopes == ["service_account_scopes_value"] + assert response.machine_type == "machine_type_value" + assert response.state == instance.Instance.State.STARTING + assert response.install_gpu_driver is True + assert response.custom_gpu_driver_path == "custom_gpu_driver_path_value" + assert response.boot_disk_type == instance.Instance.DiskType.PD_STANDARD + assert response.boot_disk_size_gb == 1792 + assert response.data_disk_type == instance.Instance.DiskType.PD_STANDARD + assert response.data_disk_size_gb == 1766 + assert response.no_remove_data_disk is True + assert response.disk_encryption == instance.Instance.DiskEncryption.GMEK + assert response.kms_key == "kms_key_value" + assert response.no_public_ip is True + assert response.no_proxy_access is True + assert response.network == "network_value" + assert response.subnet == "subnet_value" + assert response.tags == ["tags_value"] + assert response.nic_type == instance.Instance.NicType.VIRTIO_NET + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + +def test_get_instance_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.GetInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = instance.Instance() + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance()) + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_instance_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = instance.Instance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_get_instance_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + service.GetInstanceRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_instance_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = instance.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_instance_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_instance( + service.GetInstanceRequest(), name="name_value", + ) + + +def test_create_instance( + transport: str = "grpc", request_type=service.CreateInstanceRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_from_dict(): + test_create_instance(request_type=dict) + + +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateInstanceRequest() + + +@pytest.mark.asyncio +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=service.CreateInstanceRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + +def test_create_instance_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateInstanceRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateInstanceRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_instance_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance( + parent="parent_value", + instance=gcn_instance.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].instance == gcn_instance.Instance(name="name_value") + assert args[0].instance_id == "instance_id_value" + + +def test_create_instance_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instance( + service.CreateInstanceRequest(), + parent="parent_value", + instance=gcn_instance.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_instance_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_instance( + parent="parent_value", + instance=gcn_instance.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].instance == gcn_instance.Instance(name="name_value") + assert args[0].instance_id == "instance_id_value" + + +@pytest.mark.asyncio +async def test_create_instance_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_instance( + service.CreateInstanceRequest(), + parent="parent_value", + instance=gcn_instance.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +def test_register_instance( + transport: str = "grpc", request_type=service.RegisterInstanceRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.register_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.RegisterInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_register_instance_from_dict(): + test_register_instance(request_type=dict) + + +def test_register_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_instance), "__call__" + ) as call: + client.register_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.RegisterInstanceRequest() + + +@pytest.mark.asyncio +async def test_register_instance_async( + transport: str = "grpc_asyncio", request_type=service.RegisterInstanceRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.register_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.RegisterInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_register_instance_async_from_dict(): + await test_register_instance_async(request_type=dict) + + +def test_register_instance_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RegisterInstanceRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_instance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.register_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_register_instance_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RegisterInstanceRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.register_instance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.register_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_set_instance_accelerator( + transport: str = "grpc", request_type=service.SetInstanceAcceleratorRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_accelerator), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.set_instance_accelerator(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.SetInstanceAcceleratorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_set_instance_accelerator_from_dict(): + test_set_instance_accelerator(request_type=dict) + + +def test_set_instance_accelerator_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_instance_accelerator), "__call__" + ) as call: + client.set_instance_accelerator() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SetInstanceAcceleratorRequest() + + +@pytest.mark.asyncio +async def test_set_instance_accelerator_async( + transport: str = "grpc_asyncio", request_type=service.SetInstanceAcceleratorRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_accelerator), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.set_instance_accelerator(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.SetInstanceAcceleratorRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_set_instance_accelerator_async_from_dict(): + await test_set_instance_accelerator_async(request_type=dict) + + +def test_set_instance_accelerator_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SetInstanceAcceleratorRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_instance_accelerator), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.set_instance_accelerator(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_instance_accelerator_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SetInstanceAcceleratorRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_accelerator), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.set_instance_accelerator(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_set_instance_machine_type( + transport: str = "grpc", request_type=service.SetInstanceMachineTypeRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_machine_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.set_instance_machine_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.SetInstanceMachineTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_set_instance_machine_type_from_dict(): + test_set_instance_machine_type(request_type=dict) + + +def test_set_instance_machine_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_machine_type), "__call__" + ) as call: + client.set_instance_machine_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SetInstanceMachineTypeRequest() + + +@pytest.mark.asyncio +async def test_set_instance_machine_type_async( + transport: str = "grpc_asyncio", request_type=service.SetInstanceMachineTypeRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_instance_machine_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.set_instance_machine_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.SetInstanceMachineTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_set_instance_machine_type_async_from_dict(): + await test_set_instance_machine_type_async(request_type=dict) + + +def test_set_instance_machine_type_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SetInstanceMachineTypeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_machine_type), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.set_instance_machine_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_instance_machine_type_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SetInstanceMachineTypeRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_machine_type), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.set_instance_machine_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_instance_config( + transport: str = "grpc", request_type=service.UpdateInstanceConfigRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_config_from_dict(): + test_update_instance_config(request_type=dict) + + +def test_update_instance_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), "__call__" + ) as call: + client.update_instance_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceConfigRequest() + + +@pytest.mark.asyncio +async def test_update_instance_config_async( + transport: str = "grpc_asyncio", request_type=service.UpdateInstanceConfigRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateInstanceConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_config_async_from_dict(): + await test_update_instance_config_async(request_type=dict) + + +def test_update_instance_config_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateInstanceConfigRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_instance_config_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateInstanceConfigRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_instance_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_shielded_instance_config( + transport: str = "grpc", request_type=service.UpdateShieldedInstanceConfigRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_shielded_instance_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_shielded_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateShieldedInstanceConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_shielded_instance_config_from_dict(): + test_update_shielded_instance_config(request_type=dict) + + +def test_update_shielded_instance_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_shielded_instance_config), "__call__" + ) as call: + client.update_shielded_instance_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateShieldedInstanceConfigRequest() + + +@pytest.mark.asyncio +async def test_update_shielded_instance_config_async( + transport: str = "grpc_asyncio", + request_type=service.UpdateShieldedInstanceConfigRequest, +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_shielded_instance_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_shielded_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateShieldedInstanceConfigRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_shielded_instance_config_async_from_dict(): + await test_update_shielded_instance_config_async(request_type=dict) + + +def test_update_shielded_instance_config_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateShieldedInstanceConfigRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_shielded_instance_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_shielded_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_shielded_instance_config_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateShieldedInstanceConfigRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_shielded_instance_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_shielded_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_set_instance_labels( + transport: str = "grpc", request_type=service.SetInstanceLabelsRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_labels), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.set_instance_labels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.SetInstanceLabelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_set_instance_labels_from_dict(): + test_set_instance_labels(request_type=dict) + + +def test_set_instance_labels_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_instance_labels), "__call__" + ) as call: + client.set_instance_labels() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.SetInstanceLabelsRequest() + + +@pytest.mark.asyncio +async def test_set_instance_labels_async( + transport: str = "grpc_asyncio", request_type=service.SetInstanceLabelsRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_labels), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.set_instance_labels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.SetInstanceLabelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_set_instance_labels_async_from_dict(): + await test_set_instance_labels_async(request_type=dict) + + +def test_set_instance_labels_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SetInstanceLabelsRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_instance_labels), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.set_instance_labels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_instance_labels_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.SetInstanceLabelsRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_instance_labels), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.set_instance_labels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_instance( + transport: str = "grpc", request_type=service.DeleteInstanceRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_instance_from_dict(): + test_delete_instance(request_type=dict) + + +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteInstanceRequest() + + +@pytest.mark.asyncio +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=service.DeleteInstanceRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + +def test_delete_instance_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_instance_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_delete_instance_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + service.DeleteInstanceRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_instance( + service.DeleteInstanceRequest(), name="name_value", + ) + + +def test_start_instance( + transport: str = "grpc", request_type=service.StartInstanceRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.start_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.StartInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_start_instance_from_dict(): + test_start_instance(request_type=dict) + + +def test_start_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_instance), "__call__") as call: + client.start_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.StartInstanceRequest() + + +@pytest.mark.asyncio +async def test_start_instance_async( + transport: str = "grpc_asyncio", request_type=service.StartInstanceRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.start_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.StartInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_start_instance_async_from_dict(): + await test_start_instance_async(request_type=dict) + + +def test_start_instance_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.StartInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.start_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_start_instance_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.StartInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.start_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_stop_instance( + transport: str = "grpc", request_type=service.StopInstanceRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.stop_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.StopInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_stop_instance_from_dict(): + test_stop_instance(request_type=dict) + + +def test_stop_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.stop_instance), "__call__") as call: + client.stop_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.StopInstanceRequest() + + +@pytest.mark.asyncio +async def test_stop_instance_async( + transport: str = "grpc_asyncio", request_type=service.StopInstanceRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.stop_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.StopInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_stop_instance_async_from_dict(): + await test_stop_instance_async(request_type=dict) + + +def test_stop_instance_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.StopInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.stop_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.stop_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_stop_instance_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.StopInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.stop_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_reset_instance( + transport: str = "grpc", request_type=service.ResetInstanceRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.reset_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.reset_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ResetInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_reset_instance_from_dict(): + test_reset_instance(request_type=dict) + + +def test_reset_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reset_instance), "__call__") as call: + client.reset_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ResetInstanceRequest() + + +@pytest.mark.asyncio +async def test_reset_instance_async( + transport: str = "grpc_asyncio", request_type=service.ResetInstanceRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reset_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.reset_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ResetInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_reset_instance_async_from_dict(): + await test_reset_instance_async(request_type=dict) + + +def test_reset_instance_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ResetInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.reset_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.reset_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_reset_instance_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ResetInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.reset_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.reset_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_report_instance_info( + transport: str = "grpc", request_type=service.ReportInstanceInfoRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_instance_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.report_instance_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ReportInstanceInfoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_report_instance_info_from_dict(): + test_report_instance_info(request_type=dict) + + +def test_report_instance_info_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_instance_info), "__call__" + ) as call: + client.report_instance_info() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ReportInstanceInfoRequest() + + +@pytest.mark.asyncio +async def test_report_instance_info_async( + transport: str = "grpc_asyncio", request_type=service.ReportInstanceInfoRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_instance_info), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.report_instance_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ReportInstanceInfoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_report_instance_info_async_from_dict(): + await test_report_instance_info_async(request_type=dict) + + +def test_report_instance_info_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.ReportInstanceInfoRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_instance_info), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.report_instance_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_report_instance_info_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ReportInstanceInfoRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_instance_info), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.report_instance_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_is_instance_upgradeable( + transport: str = "grpc", request_type=service.IsInstanceUpgradeableRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.is_instance_upgradeable), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.IsInstanceUpgradeableResponse( + upgradeable=True, + upgrade_version="upgrade_version_value", + upgrade_info="upgrade_info_value", + upgrade_image="upgrade_image_value", + ) + response = client.is_instance_upgradeable(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.IsInstanceUpgradeableRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.IsInstanceUpgradeableResponse) + assert response.upgradeable is True + assert response.upgrade_version == "upgrade_version_value" + assert response.upgrade_info == "upgrade_info_value" + assert response.upgrade_image == "upgrade_image_value" + + +def test_is_instance_upgradeable_from_dict(): + test_is_instance_upgradeable(request_type=dict) + + +def test_is_instance_upgradeable_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.is_instance_upgradeable), "__call__" + ) as call: + client.is_instance_upgradeable() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.IsInstanceUpgradeableRequest() + + +@pytest.mark.asyncio +async def test_is_instance_upgradeable_async( + transport: str = "grpc_asyncio", request_type=service.IsInstanceUpgradeableRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.is_instance_upgradeable), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.IsInstanceUpgradeableResponse( + upgradeable=True, + upgrade_version="upgrade_version_value", + upgrade_info="upgrade_info_value", + upgrade_image="upgrade_image_value", + ) + ) + response = await client.is_instance_upgradeable(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.IsInstanceUpgradeableRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.IsInstanceUpgradeableResponse) + assert response.upgradeable is True + assert response.upgrade_version == "upgrade_version_value" + assert response.upgrade_info == "upgrade_info_value" + assert response.upgrade_image == "upgrade_image_value" + + +@pytest.mark.asyncio +async def test_is_instance_upgradeable_async_from_dict(): + await test_is_instance_upgradeable_async(request_type=dict) + + +def test_is_instance_upgradeable_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.IsInstanceUpgradeableRequest() + + request.notebook_instance = "notebook_instance/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.is_instance_upgradeable), "__call__" + ) as call: + call.return_value = service.IsInstanceUpgradeableResponse() + client.is_instance_upgradeable(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "notebook_instance=notebook_instance/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_is_instance_upgradeable_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.IsInstanceUpgradeableRequest() + + request.notebook_instance = "notebook_instance/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.is_instance_upgradeable), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.IsInstanceUpgradeableResponse() + ) + await client.is_instance_upgradeable(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "notebook_instance=notebook_instance/value", + ) in kw["metadata"] + + +def test_get_instance_health( + transport: str = "grpc", request_type=service.GetInstanceHealthRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_health), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.GetInstanceHealthResponse( + health_state=service.GetInstanceHealthResponse.HealthState.HEALTHY, + ) + response = client.get_instance_health(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceHealthRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.GetInstanceHealthResponse) + assert ( + response.health_state == service.GetInstanceHealthResponse.HealthState.HEALTHY + ) + + +def test_get_instance_health_from_dict(): + test_get_instance_health(request_type=dict) + + +def test_get_instance_health_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_health), "__call__" + ) as call: + client.get_instance_health() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceHealthRequest() + + +@pytest.mark.asyncio +async def test_get_instance_health_async( + transport: str = "grpc_asyncio", request_type=service.GetInstanceHealthRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_health), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.GetInstanceHealthResponse( + health_state=service.GetInstanceHealthResponse.HealthState.HEALTHY, + ) + ) + response = await client.get_instance_health(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetInstanceHealthRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.GetInstanceHealthResponse) + assert ( + response.health_state == service.GetInstanceHealthResponse.HealthState.HEALTHY + ) + + +@pytest.mark.asyncio +async def test_get_instance_health_async_from_dict(): + await test_get_instance_health_async(request_type=dict) + + +def test_get_instance_health_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetInstanceHealthRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_health), "__call__" + ) as call: + call.return_value = service.GetInstanceHealthResponse() + client.get_instance_health(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_instance_health_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetInstanceHealthRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_instance_health), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.GetInstanceHealthResponse() + ) + await client.get_instance_health(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_instance_health_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_health), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.GetInstanceHealthResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance_health(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_get_instance_health_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance_health( + service.GetInstanceHealthRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_instance_health_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_instance_health), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.GetInstanceHealthResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.GetInstanceHealthResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance_health(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_instance_health_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_instance_health( + service.GetInstanceHealthRequest(), name="name_value", + ) + + +def test_upgrade_instance( + transport: str = "grpc", request_type=service.UpgradeInstanceRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.upgrade_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpgradeInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_upgrade_instance_from_dict(): + test_upgrade_instance(request_type=dict) + + +def test_upgrade_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: + client.upgrade_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpgradeInstanceRequest() + + +@pytest.mark.asyncio +async def test_upgrade_instance_async( + transport: str = "grpc_asyncio", request_type=service.UpgradeInstanceRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.upgrade_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpgradeInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_upgrade_instance_async_from_dict(): + await test_upgrade_instance_async(request_type=dict) + + +def test_upgrade_instance_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpgradeInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.upgrade_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_upgrade_instance_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpgradeInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.upgrade_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_rollback_instance( + transport: str = "grpc", request_type=service.RollbackInstanceRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.rollback_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.RollbackInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_rollback_instance_from_dict(): + test_rollback_instance(request_type=dict) + + +def test_rollback_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.rollback_instance), "__call__" + ) as call: + client.rollback_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.RollbackInstanceRequest() + + +@pytest.mark.asyncio +async def test_rollback_instance_async( + transport: str = "grpc_asyncio", request_type=service.RollbackInstanceRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_instance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.rollback_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.RollbackInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_rollback_instance_async_from_dict(): + await test_rollback_instance_async(request_type=dict) + + +def test_rollback_instance_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RollbackInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.rollback_instance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.rollback_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rollback_instance_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.RollbackInstanceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_instance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.rollback_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_upgrade_instance_internal( + transport: str = "grpc", request_type=service.UpgradeInstanceInternalRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance_internal), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.upgrade_instance_internal(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpgradeInstanceInternalRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_upgrade_instance_internal_from_dict(): + test_upgrade_instance_internal(request_type=dict) + + +def test_upgrade_instance_internal_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance_internal), "__call__" + ) as call: + client.upgrade_instance_internal() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpgradeInstanceInternalRequest() + + +@pytest.mark.asyncio +async def test_upgrade_instance_internal_async( + transport: str = "grpc_asyncio", request_type=service.UpgradeInstanceInternalRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.upgrade_instance_internal), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.upgrade_instance_internal(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpgradeInstanceInternalRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_upgrade_instance_internal_async_from_dict(): + await test_upgrade_instance_internal_async(request_type=dict) + + +def test_upgrade_instance_internal_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpgradeInstanceInternalRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance_internal), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.upgrade_instance_internal(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_upgrade_instance_internal_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpgradeInstanceInternalRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance_internal), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.upgrade_instance_internal(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_list_environments( + transport: str = "grpc", request_type=service.ListEnvironmentsRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = service.ListEnvironmentsResponse( + next_page_token="next_page_token_value", unreachable=["unreachable_value"], + ) + response = client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListEnvironmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEnvironmentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_environments_from_dict(): + test_list_environments(request_type=dict) + + +def test_list_environments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + client.list_environments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListEnvironmentsRequest() + + +@pytest.mark.asyncio +async def test_list_environments_async( + transport: str = "grpc_asyncio", request_type=service.ListEnvironmentsRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListEnvironmentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListEnvironmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEnvironmentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_environments_async_from_dict(): + await test_list_environments_async(request_type=dict) + + +def test_list_environments_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListEnvironmentsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + call.return_value = service.ListEnvironmentsResponse() + client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_environments_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = service.ListEnvironmentsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListEnvironmentsResponse() + ) + await client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_environments_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListEnvironmentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_environments(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +def test_list_environments_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_environments( + service.ListEnvironmentsRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_environments_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListEnvironmentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListEnvironmentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_environments(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_environments_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_environments( + service.ListEnvironmentsRequest(), parent="parent_value", + ) + + +def test_list_environments_pager(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListEnvironmentsResponse( + environments=[ + environment.Environment(), + environment.Environment(), + environment.Environment(), + ], + next_page_token="abc", + ), + service.ListEnvironmentsResponse(environments=[], next_page_token="def",), + service.ListEnvironmentsResponse( + environments=[environment.Environment(),], next_page_token="ghi", + ), + service.ListEnvironmentsResponse( + environments=[environment.Environment(), environment.Environment(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_environments(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, environment.Environment) for i in results) + + +def test_list_environments_pages(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListEnvironmentsResponse( + environments=[ + environment.Environment(), + environment.Environment(), + environment.Environment(), + ], + next_page_token="abc", + ), + service.ListEnvironmentsResponse(environments=[], next_page_token="def",), + service.ListEnvironmentsResponse( + environments=[environment.Environment(),], next_page_token="ghi", + ), + service.ListEnvironmentsResponse( + environments=[environment.Environment(), environment.Environment(),], + ), + RuntimeError, + ) + pages = list(client.list_environments(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_environments_async_pager(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListEnvironmentsResponse( + environments=[ + environment.Environment(), + environment.Environment(), + environment.Environment(), + ], + next_page_token="abc", + ), + service.ListEnvironmentsResponse(environments=[], next_page_token="def",), + service.ListEnvironmentsResponse( + environments=[environment.Environment(),], next_page_token="ghi", + ), + service.ListEnvironmentsResponse( + environments=[environment.Environment(), environment.Environment(),], + ), + RuntimeError, + ) + async_pager = await client.list_environments(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, environment.Environment) for i in responses) + + +@pytest.mark.asyncio +async def test_list_environments_async_pages(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListEnvironmentsResponse( + environments=[ + environment.Environment(), + environment.Environment(), + environment.Environment(), + ], + next_page_token="abc", + ), + service.ListEnvironmentsResponse(environments=[], next_page_token="def",), + service.ListEnvironmentsResponse( + environments=[environment.Environment(),], next_page_token="ghi", + ), + service.ListEnvironmentsResponse( + environments=[environment.Environment(), environment.Environment(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_environments(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_environment( + transport: str = "grpc", request_type=service.GetEnvironmentRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = environment.Environment( + name="name_value", + display_name="display_name_value", + description="description_value", + post_startup_script="post_startup_script_value", + vm_image=environment.VmImage(project="project_value"), + ) + response = client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetEnvironmentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, environment.Environment) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.post_startup_script == "post_startup_script_value" + + +def test_get_environment_from_dict(): + test_get_environment(request_type=dict) + + +def test_get_environment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + client.get_environment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetEnvironmentRequest() + + +@pytest.mark.asyncio +async def test_get_environment_async( + transport: str = "grpc_asyncio", request_type=service.GetEnvironmentRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + environment.Environment( + name="name_value", + display_name="display_name_value", + description="description_value", + post_startup_script="post_startup_script_value", + ) + ) + response = await client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetEnvironmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, environment.Environment) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.post_startup_script == "post_startup_script_value" + + +@pytest.mark.asyncio +async def test_get_environment_async_from_dict(): + await test_get_environment_async(request_type=dict) + + +def test_get_environment_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetEnvironmentRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + call.return_value = environment.Environment() + client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_environment_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetEnvironmentRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + environment.Environment() + ) + await client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_environment_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = environment.Environment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_environment(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_get_environment_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_environment( + service.GetEnvironmentRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_environment_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = environment.Environment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + environment.Environment() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_environment(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_environment_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_environment( + service.GetEnvironmentRequest(), name="name_value", + ) + + +def test_create_environment( + transport: str = "grpc", request_type=service.CreateEnvironmentRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateEnvironmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_environment_from_dict(): + test_create_environment(request_type=dict) + + +def test_create_environment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + client.create_environment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateEnvironmentRequest() + + +@pytest.mark.asyncio +async def test_create_environment_async( + transport: str = "grpc_asyncio", request_type=service.CreateEnvironmentRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateEnvironmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_environment_async_from_dict(): + await test_create_environment_async(request_type=dict) + + +def test_create_environment_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateEnvironmentRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_environment_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateEnvironmentRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_environment_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_environment( + parent="parent_value", + environment=gcn_environment.Environment(name="name_value"), + environment_id="environment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].environment == gcn_environment.Environment(name="name_value") + assert args[0].environment_id == "environment_id_value" + + +def test_create_environment_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_environment( + service.CreateEnvironmentRequest(), + parent="parent_value", + environment=gcn_environment.Environment(name="name_value"), + environment_id="environment_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_environment_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_environment( + parent="parent_value", + environment=gcn_environment.Environment(name="name_value"), + environment_id="environment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].environment == gcn_environment.Environment(name="name_value") + assert args[0].environment_id == "environment_id_value" + + +@pytest.mark.asyncio +async def test_create_environment_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_environment( + service.CreateEnvironmentRequest(), + parent="parent_value", + environment=gcn_environment.Environment(name="name_value"), + environment_id="environment_id_value", + ) + + +def test_delete_environment( + transport: str = "grpc", request_type=service.DeleteEnvironmentRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteEnvironmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_environment_from_dict(): + test_delete_environment(request_type=dict) + + +def test_delete_environment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_environment), "__call__" + ) as call: + client.delete_environment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteEnvironmentRequest() + + +@pytest.mark.asyncio +async def test_delete_environment_async( + transport: str = "grpc_asyncio", request_type=service.DeleteEnvironmentRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteEnvironmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_environment_async_from_dict(): + await test_delete_environment_async(request_type=dict) + + +def test_delete_environment_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteEnvironmentRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_environment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_environment_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteEnvironmentRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_environment_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_environment(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_delete_environment_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_environment( + service.DeleteEnvironmentRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_environment_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_environment(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_environment_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_environment( + service.DeleteEnvironmentRequest(), name="name_value", + ) + + +def test_list_schedules( + transport: str = "grpc", request_type=service.ListSchedulesRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schedules), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListSchedulesResponse( + next_page_token="next_page_token_value", unreachable=["unreachable_value"], + ) + response = client.list_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSchedulesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchedulesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_schedules_from_dict(): + test_list_schedules(request_type=dict) + + +def test_list_schedules_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schedules), "__call__") as call: + client.list_schedules() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSchedulesRequest() + + +@pytest.mark.asyncio +async def test_list_schedules_async( + transport: str = "grpc_asyncio", request_type=service.ListSchedulesRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schedules), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSchedulesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSchedulesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSchedulesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_schedules_async_from_dict(): + await test_list_schedules_async(request_type=dict) + + +def test_list_schedules_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListSchedulesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schedules), "__call__") as call: + call.return_value = service.ListSchedulesResponse() + client.list_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_schedules_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListSchedulesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schedules), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSchedulesResponse() + ) + await client.list_schedules(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_schedules_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schedules), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListSchedulesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_schedules(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +def test_list_schedules_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_schedules( + service.ListSchedulesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_schedules_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schedules), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = service.ListSchedulesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSchedulesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_schedules(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_schedules_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_schedules( + service.ListSchedulesRequest(), parent="parent_value", + ) + + +def test_list_schedules_pager(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schedules), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListSchedulesResponse( + schedules=[ + schedule.Schedule(), + schedule.Schedule(), + schedule.Schedule(), + ], + next_page_token="abc", + ), + service.ListSchedulesResponse(schedules=[], next_page_token="def",), + service.ListSchedulesResponse( + schedules=[schedule.Schedule(),], next_page_token="ghi", + ), + service.ListSchedulesResponse( + schedules=[schedule.Schedule(), schedule.Schedule(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_schedules(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, schedule.Schedule) for i in results) + + +def test_list_schedules_pages(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schedules), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSchedulesResponse( + schedules=[ + schedule.Schedule(), + schedule.Schedule(), + schedule.Schedule(), + ], + next_page_token="abc", + ), + service.ListSchedulesResponse(schedules=[], next_page_token="def",), + service.ListSchedulesResponse( + schedules=[schedule.Schedule(),], next_page_token="ghi", + ), + service.ListSchedulesResponse( + schedules=[schedule.Schedule(), schedule.Schedule(),], + ), + RuntimeError, + ) + pages = list(client.list_schedules(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_schedules_async_pager(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_schedules), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSchedulesResponse( + schedules=[ + schedule.Schedule(), + schedule.Schedule(), + schedule.Schedule(), + ], + next_page_token="abc", + ), + service.ListSchedulesResponse(schedules=[], next_page_token="def",), + service.ListSchedulesResponse( + schedules=[schedule.Schedule(),], next_page_token="ghi", + ), + service.ListSchedulesResponse( + schedules=[schedule.Schedule(), schedule.Schedule(),], + ), + RuntimeError, + ) + async_pager = await client.list_schedules(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, schedule.Schedule) for i in responses) + + +@pytest.mark.asyncio +async def test_list_schedules_async_pages(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schedules), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListSchedulesResponse( + schedules=[ + schedule.Schedule(), + schedule.Schedule(), + schedule.Schedule(), + ], + next_page_token="abc", + ), + service.ListSchedulesResponse(schedules=[], next_page_token="def",), + service.ListSchedulesResponse( + schedules=[schedule.Schedule(),], next_page_token="ghi", + ), + service.ListSchedulesResponse( + schedules=[schedule.Schedule(), schedule.Schedule(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_schedules(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_schedule(transport: str = "grpc", request_type=service.GetScheduleRequest): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schedule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schedule.Schedule( + name="name_value", + display_name="display_name_value", + description="description_value", + state=schedule.Schedule.State.ENABLED, + cron_schedule="cron_schedule_value", + time_zone="time_zone_value", + ) + response = client.get_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetScheduleRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schedule.Schedule) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == schedule.Schedule.State.ENABLED + assert response.cron_schedule == "cron_schedule_value" + assert response.time_zone == "time_zone_value" + + +def test_get_schedule_from_dict(): + test_get_schedule(request_type=dict) + + +def test_get_schedule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schedule), "__call__") as call: + client.get_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetScheduleRequest() + + +@pytest.mark.asyncio +async def test_get_schedule_async( + transport: str = "grpc_asyncio", request_type=service.GetScheduleRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schedule), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schedule.Schedule( + name="name_value", + display_name="display_name_value", + description="description_value", + state=schedule.Schedule.State.ENABLED, + cron_schedule="cron_schedule_value", + time_zone="time_zone_value", + ) + ) + response = await client.get_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetScheduleRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, schedule.Schedule) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == schedule.Schedule.State.ENABLED + assert response.cron_schedule == "cron_schedule_value" + assert response.time_zone == "time_zone_value" + + +@pytest.mark.asyncio +async def test_get_schedule_async_from_dict(): + await test_get_schedule_async(request_type=dict) + + +def test_get_schedule_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetScheduleRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schedule), "__call__") as call: + call.return_value = schedule.Schedule() + client.get_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_schedule_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetScheduleRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schedule), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.Schedule()) + await client.get_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_schedule_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schedule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schedule.Schedule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_schedule(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_get_schedule_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_schedule( + service.GetScheduleRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_schedule_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schedule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schedule.Schedule() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schedule.Schedule()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_schedule(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_schedule_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_schedule( + service.GetScheduleRequest(), name="name_value", + ) + + +def test_delete_schedule( + transport: str = "grpc", request_type=service.DeleteScheduleRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schedule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteScheduleRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_schedule_from_dict(): + test_delete_schedule(request_type=dict) + + +def test_delete_schedule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_schedule), "__call__") as call: + client.delete_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteScheduleRequest() + + +@pytest.mark.asyncio +async def test_delete_schedule_async( + transport: str = "grpc_asyncio", request_type=service.DeleteScheduleRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schedule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteScheduleRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_schedule_async_from_dict(): + await test_delete_schedule_async(request_type=dict) + + +def test_delete_schedule_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteScheduleRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_schedule), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_schedule_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteScheduleRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schedule), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_schedule_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schedule), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_schedule(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_delete_schedule_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_schedule( + service.DeleteScheduleRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_schedule_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schedule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_schedule(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_schedule_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_schedule( + service.DeleteScheduleRequest(), name="name_value", + ) + + +def test_create_schedule( + transport: str = "grpc", request_type=service.CreateScheduleRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schedule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateScheduleRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_schedule_from_dict(): + test_create_schedule(request_type=dict) + + +def test_create_schedule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_schedule), "__call__") as call: + client.create_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateScheduleRequest() + + +@pytest.mark.asyncio +async def test_create_schedule_async( + transport: str = "grpc_asyncio", request_type=service.CreateScheduleRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schedule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateScheduleRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_schedule_async_from_dict(): + await test_create_schedule_async(request_type=dict) + + +def test_create_schedule_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateScheduleRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_schedule), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_schedule_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateScheduleRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schedule), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_schedule_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schedule), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_schedule( + parent="parent_value", + schedule=gcn_schedule.Schedule(name="name_value"), + schedule_id="schedule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].schedule == gcn_schedule.Schedule(name="name_value") + assert args[0].schedule_id == "schedule_id_value" + + +def test_create_schedule_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_schedule( + service.CreateScheduleRequest(), + parent="parent_value", + schedule=gcn_schedule.Schedule(name="name_value"), + schedule_id="schedule_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_schedule_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schedule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_schedule( + parent="parent_value", + schedule=gcn_schedule.Schedule(name="name_value"), + schedule_id="schedule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].schedule == gcn_schedule.Schedule(name="name_value") + assert args[0].schedule_id == "schedule_id_value" + + +@pytest.mark.asyncio +async def test_create_schedule_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_schedule( + service.CreateScheduleRequest(), + parent="parent_value", + schedule=gcn_schedule.Schedule(name="name_value"), + schedule_id="schedule_id_value", + ) + + +def test_trigger_schedule( + transport: str = "grpc", request_type=service.TriggerScheduleRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_schedule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.trigger_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.TriggerScheduleRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_trigger_schedule_from_dict(): + test_trigger_schedule(request_type=dict) + + +def test_trigger_schedule_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_schedule), "__call__") as call: + client.trigger_schedule() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.TriggerScheduleRequest() + + +@pytest.mark.asyncio +async def test_trigger_schedule_async( + transport: str = "grpc_asyncio", request_type=service.TriggerScheduleRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_schedule), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.trigger_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.TriggerScheduleRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_trigger_schedule_async_from_dict(): + await test_trigger_schedule_async(request_type=dict) + + +def test_trigger_schedule_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.TriggerScheduleRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_schedule), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.trigger_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_trigger_schedule_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.TriggerScheduleRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_schedule), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.trigger_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_list_executions( + transport: str = "grpc", request_type=service.ListExecutionsRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListExecutionsResponse( + next_page_token="next_page_token_value", unreachable=["unreachable_value"], + ) + response = client.list_executions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListExecutionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExecutionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_executions_from_dict(): + test_list_executions(request_type=dict) + + +def test_list_executions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + client.list_executions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListExecutionsRequest() + + +@pytest.mark.asyncio +async def test_list_executions_async( + transport: str = "grpc_asyncio", request_type=service.ListExecutionsRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListExecutionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_executions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListExecutionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExecutionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_executions_async_from_dict(): + await test_list_executions_async(request_type=dict) + + +def test_list_executions_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.ListExecutionsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + call.return_value = service.ListExecutionsResponse() + client.list_executions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_executions_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListExecutionsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListExecutionsResponse() + ) + await client.list_executions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_executions_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = service.ListExecutionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_executions(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +def test_list_executions_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_executions( + service.ListExecutionsRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_executions_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListExecutionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListExecutionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_executions(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_executions_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_executions( + service.ListExecutionsRequest(), parent="parent_value", + ) + + +def test_list_executions_pager(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListExecutionsResponse( + executions=[ + execution.Execution(), + execution.Execution(), + execution.Execution(), + ], + next_page_token="abc", + ), + service.ListExecutionsResponse(executions=[], next_page_token="def",), + service.ListExecutionsResponse( + executions=[execution.Execution(),], next_page_token="ghi", + ), + service.ListExecutionsResponse( + executions=[execution.Execution(), execution.Execution(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_executions(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, execution.Execution) for i in results) + + +def test_list_executions_pages(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListExecutionsResponse( + executions=[ + execution.Execution(), + execution.Execution(), + execution.Execution(), + ], + next_page_token="abc", + ), + service.ListExecutionsResponse(executions=[], next_page_token="def",), + service.ListExecutionsResponse( + executions=[execution.Execution(),], next_page_token="ghi", + ), + service.ListExecutionsResponse( + executions=[execution.Execution(), execution.Execution(),], + ), + RuntimeError, + ) + pages = list(client.list_executions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_executions_async_pager(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_executions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListExecutionsResponse( + executions=[ + execution.Execution(), + execution.Execution(), + execution.Execution(), + ], + next_page_token="abc", + ), + service.ListExecutionsResponse(executions=[], next_page_token="def",), + service.ListExecutionsResponse( + executions=[execution.Execution(),], next_page_token="ghi", + ), + service.ListExecutionsResponse( + executions=[execution.Execution(), execution.Execution(),], + ), + RuntimeError, + ) + async_pager = await client.list_executions(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, execution.Execution) for i in responses) + + +@pytest.mark.asyncio +async def test_list_executions_async_pages(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_executions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListExecutionsResponse( + executions=[ + execution.Execution(), + execution.Execution(), + execution.Execution(), + ], + next_page_token="abc", + ), + service.ListExecutionsResponse(executions=[], next_page_token="def",), + service.ListExecutionsResponse( + executions=[execution.Execution(),], next_page_token="ghi", + ), + service.ListExecutionsResponse( + executions=[execution.Execution(), execution.Execution(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_executions(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_execution( + transport: str = "grpc", request_type=service.GetExecutionRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = execution.Execution( + name="name_value", + display_name="display_name_value", + description="description_value", + state=execution.Execution.State.QUEUED, + output_notebook_file="output_notebook_file_value", + job_uri="job_uri_value", + ) + response = client.get_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetExecutionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, execution.Execution) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == execution.Execution.State.QUEUED + assert response.output_notebook_file == "output_notebook_file_value" + assert response.job_uri == "job_uri_value" + + +def test_get_execution_from_dict(): + test_get_execution(request_type=dict) + + +def test_get_execution_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + client.get_execution() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetExecutionRequest() + + +@pytest.mark.asyncio +async def test_get_execution_async( + transport: str = "grpc_asyncio", request_type=service.GetExecutionRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + execution.Execution( + name="name_value", + display_name="display_name_value", + description="description_value", + state=execution.Execution.State.QUEUED, + output_notebook_file="output_notebook_file_value", + job_uri="job_uri_value", + ) + ) + response = await client.get_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetExecutionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, execution.Execution) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == execution.Execution.State.QUEUED + assert response.output_notebook_file == "output_notebook_file_value" + assert response.job_uri == "job_uri_value" + + +@pytest.mark.asyncio +async def test_get_execution_async_from_dict(): + await test_get_execution_async(request_type=dict) + + +def test_get_execution_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetExecutionRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + call.return_value = execution.Execution() + client.get_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_execution_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetExecutionRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(execution.Execution()) + await client.get_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_execution_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = execution.Execution() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_execution(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_get_execution_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_execution( + service.GetExecutionRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_execution_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = execution.Execution() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(execution.Execution()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_execution(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_execution_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_execution( + service.GetExecutionRequest(), name="name_value", + ) + + +def test_delete_execution( + transport: str = "grpc", request_type=service.DeleteExecutionRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteExecutionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_execution_from_dict(): + test_delete_execution(request_type=dict) + + +def test_delete_execution_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_execution), "__call__") as call: + client.delete_execution() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteExecutionRequest() + + +@pytest.mark.asyncio +async def test_delete_execution_async( + transport: str = "grpc_asyncio", request_type=service.DeleteExecutionRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteExecutionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_execution_async_from_dict(): + await test_delete_execution_async(request_type=dict) + + +def test_delete_execution_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteExecutionRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_execution), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_execution_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteExecutionRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_execution), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_execution_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_execution), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_execution(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_delete_execution_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_execution( + service.DeleteExecutionRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_execution_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_execution(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_execution_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_execution( + service.DeleteExecutionRequest(), name="name_value", + ) + + +def test_create_execution( + transport: str = "grpc", request_type=service.CreateExecutionRequest +): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateExecutionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_execution_from_dict(): + test_create_execution(request_type=dict) + + +def test_create_execution_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_execution), "__call__") as call: + client.create_execution() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateExecutionRequest() + + +@pytest.mark.asyncio +async def test_create_execution_async( + transport: str = "grpc_asyncio", request_type=service.CreateExecutionRequest +): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateExecutionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_execution_async_from_dict(): + await test_create_execution_async(request_type=dict) + + +def test_create_execution_field_headers(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateExecutionRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_execution), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_execution_field_headers_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateExecutionRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_execution_flattened(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_execution( + parent="parent_value", + execution=gcn_execution.Execution( + execution_template=gcn_execution.ExecutionTemplate( + scale_tier=gcn_execution.ExecutionTemplate.ScaleTier.BASIC + ) + ), + execution_id="execution_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].execution == gcn_execution.Execution( + execution_template=gcn_execution.ExecutionTemplate( + scale_tier=gcn_execution.ExecutionTemplate.ScaleTier.BASIC + ) + ) + assert args[0].execution_id == "execution_id_value" + + +def test_create_execution_flattened_error(): + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_execution( + service.CreateExecutionRequest(), + parent="parent_value", + execution=gcn_execution.Execution( + execution_template=gcn_execution.ExecutionTemplate( + scale_tier=gcn_execution.ExecutionTemplate.ScaleTier.BASIC + ) + ), + execution_id="execution_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_execution_flattened_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_execution( + parent="parent_value", + execution=gcn_execution.Execution( + execution_template=gcn_execution.ExecutionTemplate( + scale_tier=gcn_execution.ExecutionTemplate.ScaleTier.BASIC + ) + ), + execution_id="execution_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].execution == gcn_execution.Execution( + execution_template=gcn_execution.ExecutionTemplate( + scale_tier=gcn_execution.ExecutionTemplate.ScaleTier.BASIC + ) + ) + assert args[0].execution_id == "execution_id_value" + + +@pytest.mark.asyncio +async def test_create_execution_flattened_error_async(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_execution( + service.CreateExecutionRequest(), + parent="parent_value", + execution=gcn_execution.Execution( + execution_template=gcn_execution.ExecutionTemplate( + scale_tier=gcn_execution.ExecutionTemplate.ScaleTier.BASIC + ) + ), + execution_id="execution_id_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.NotebookServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.NotebookServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NotebookServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.NotebookServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NotebookServiceClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.NotebookServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = NotebookServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.NotebookServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.NotebookServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.NotebookServiceGrpcTransport, + transports.NotebookServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = NotebookServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.NotebookServiceGrpcTransport,) + + +def test_notebook_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.NotebookServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_notebook_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.notebooks_v1.services.notebook_service.transports.NotebookServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.NotebookServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_instances", + "get_instance", + "create_instance", + "register_instance", + "set_instance_accelerator", + "set_instance_machine_type", + "update_instance_config", + "update_shielded_instance_config", + "set_instance_labels", + "delete_instance", + "start_instance", + "stop_instance", + "reset_instance", + "report_instance_info", + "is_instance_upgradeable", + "get_instance_health", + "upgrade_instance", + "rollback_instance", + "upgrade_instance_internal", + "list_environments", + "get_environment", + "create_environment", + "delete_environment", + "list_schedules", + "get_schedule", + "delete_schedule", + "create_schedule", + "trigger_schedule", + "list_executions", + "get_execution", + "delete_execution", + "create_execution", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +@requires_google_auth_gte_1_25_0 +def test_notebook_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.notebooks_v1.services.notebook_service.transports.NotebookServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NotebookServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_notebook_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate 
the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.notebooks_v1.services.notebook_service.transports.NotebookServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NotebookServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_notebook_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.notebooks_v1.services.notebook_service.transports.NotebookServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NotebookServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_notebook_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + NotebookServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_notebook_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + NotebookServiceClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.NotebookServiceGrpcTransport, + transports.NotebookServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_notebook_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.NotebookServiceGrpcTransport, + transports.NotebookServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_notebook_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.NotebookServiceGrpcTransport, grpc_helpers), + (transports.NotebookServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_notebook_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "notebooks.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="notebooks.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.NotebookServiceGrpcTransport, + transports.NotebookServiceGrpcAsyncIOTransport, + ], +) +def test_notebook_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_notebook_service_host_no_port(): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="notebooks.googleapis.com" + ), + ) + assert client.transport._host == "notebooks.googleapis.com:443" + + +def test_notebook_service_host_with_port(): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="notebooks.googleapis.com:8000" + ), + ) + assert client.transport._host == "notebooks.googleapis.com:8000" + + +def test_notebook_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.NotebookServiceGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_notebook_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.NotebookServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.NotebookServiceGrpcTransport, + transports.NotebookServiceGrpcAsyncIOTransport, + ], +) +def test_notebook_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + 
"mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.NotebookServiceGrpcTransport, + transports.NotebookServiceGrpcAsyncIOTransport, + ], +) +def test_notebook_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_notebook_service_grpc_lro_client(): + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_notebook_service_grpc_lro_async_client(): + client = NotebookServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_environment_path(): + project = "squid" + environment = "clam" + expected = "projects/{project}/environments/{environment}".format( + project=project, environment=environment, + ) + actual = NotebookServiceClient.environment_path(project, environment) + assert expected == actual + + +def test_parse_environment_path(): + expected = { + "project": "whelk", + "environment": "octopus", + } + path = NotebookServiceClient.environment_path(**expected) + + # Check that the path construction is reversible. + actual = NotebookServiceClient.parse_environment_path(path) + assert expected == actual + + +def test_execution_path(): + project = "oyster" + location = "nudibranch" + execution = "cuttlefish" + expected = "projects/{project}/location/{location}/executions/{execution}".format( + project=project, location=location, execution=execution, + ) + actual = NotebookServiceClient.execution_path(project, location, execution) + assert expected == actual + + +def test_parse_execution_path(): + expected = { + "project": "mussel", + "location": "winkle", + "execution": "nautilus", + } + path = NotebookServiceClient.execution_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NotebookServiceClient.parse_execution_path(path) + assert expected == actual + + +def test_instance_path(): + project = "scallop" + instance = "abalone" + expected = "projects/{project}/instances/{instance}".format( + project=project, instance=instance, + ) + actual = NotebookServiceClient.instance_path(project, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "squid", + "instance": "clam", + } + path = NotebookServiceClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = NotebookServiceClient.parse_instance_path(path) + assert expected == actual + + +def test_schedule_path(): + project = "whelk" + location = "octopus" + schedule = "oyster" + expected = "projects/{project}/location/{location}/schedules/{schedule}".format( + project=project, location=location, schedule=schedule, + ) + actual = NotebookServiceClient.schedule_path(project, location, schedule) + assert expected == actual + + +def test_parse_schedule_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "schedule": "mussel", + } + path = NotebookServiceClient.schedule_path(**expected) + + # Check that the path construction is reversible. + actual = NotebookServiceClient.parse_schedule_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = NotebookServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = NotebookServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NotebookServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format(folder=folder,) + actual = NotebookServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = NotebookServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = NotebookServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format(organization=organization,) + actual = NotebookServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = NotebookServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = NotebookServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format(project=project,) + actual = NotebookServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = NotebookServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NotebookServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = NotebookServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = NotebookServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = NotebookServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.NotebookServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = NotebookServiceClient( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.NotebookServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = NotebookServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info)