diff --git a/docs/artifactregistry_v1/artifact_registry.rst b/docs/artifactregistry_v1/artifact_registry.rst new file mode 100644 index 0000000..17cceae --- /dev/null +++ b/docs/artifactregistry_v1/artifact_registry.rst @@ -0,0 +1,10 @@ +ArtifactRegistry +---------------------------------- + +.. automodule:: google.cloud.artifactregistry_v1.services.artifact_registry + :members: + :inherited-members: + +.. automodule:: google.cloud.artifactregistry_v1.services.artifact_registry.pagers + :members: + :inherited-members: diff --git a/docs/artifactregistry_v1/services.rst b/docs/artifactregistry_v1/services.rst new file mode 100644 index 0000000..9193e13 --- /dev/null +++ b/docs/artifactregistry_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Artifactregistry v1 API +================================================= +.. toctree:: + :maxdepth: 2 + + artifact_registry diff --git a/docs/artifactregistry_v1/types.rst b/docs/artifactregistry_v1/types.rst new file mode 100644 index 0000000..88687a8 --- /dev/null +++ b/docs/artifactregistry_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Artifactregistry v1 API +============================================== + +.. automodule:: google.cloud.artifactregistry_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/index.rst b/docs/index.rst index 9280e92..5b40362 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,6 +2,17 @@ .. include:: multiprocessing.rst +This package includes clients for multiple versions of Artifact Registry. +By default, you will get version ``v1``. + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + artifactregistry_v1/services + artifactregistry_v1/types + API Reference ------------- .. 
toctree:: diff --git a/google/cloud/artifactregistry/__init__.py b/google/cloud/artifactregistry/__init__.py index 22946d2..0cae645 100644 --- a/google/cloud/artifactregistry/__init__.py +++ b/google/cloud/artifactregistry/__init__.py @@ -14,87 +14,29 @@ # limitations under the License. # -from google.cloud.artifactregistry_v1beta2.services.artifact_registry.client import ( +from google.cloud.artifactregistry_v1.services.artifact_registry.client import ( ArtifactRegistryClient, ) -from google.cloud.artifactregistry_v1beta2.services.artifact_registry.async_client import ( +from google.cloud.artifactregistry_v1.services.artifact_registry.async_client import ( ArtifactRegistryAsyncClient, ) -from google.cloud.artifactregistry_v1beta2.types.file import File -from google.cloud.artifactregistry_v1beta2.types.file import GetFileRequest -from google.cloud.artifactregistry_v1beta2.types.file import Hash -from google.cloud.artifactregistry_v1beta2.types.file import ListFilesRequest -from google.cloud.artifactregistry_v1beta2.types.file import ListFilesResponse -from google.cloud.artifactregistry_v1beta2.types.package import DeletePackageRequest -from google.cloud.artifactregistry_v1beta2.types.package import GetPackageRequest -from google.cloud.artifactregistry_v1beta2.types.package import ListPackagesRequest -from google.cloud.artifactregistry_v1beta2.types.package import ListPackagesResponse -from google.cloud.artifactregistry_v1beta2.types.package import Package -from google.cloud.artifactregistry_v1beta2.types.repository import ( - CreateRepositoryRequest, -) -from google.cloud.artifactregistry_v1beta2.types.repository import ( - DeleteRepositoryRequest, -) -from google.cloud.artifactregistry_v1beta2.types.repository import GetRepositoryRequest -from google.cloud.artifactregistry_v1beta2.types.repository import ( - ListRepositoriesRequest, -) -from google.cloud.artifactregistry_v1beta2.types.repository import ( - ListRepositoriesResponse, -) -from 
google.cloud.artifactregistry_v1beta2.types.repository import Repository -from google.cloud.artifactregistry_v1beta2.types.repository import ( - UpdateRepositoryRequest, -) -from google.cloud.artifactregistry_v1beta2.types.service import OperationMetadata -from google.cloud.artifactregistry_v1beta2.types.tag import CreateTagRequest -from google.cloud.artifactregistry_v1beta2.types.tag import DeleteTagRequest -from google.cloud.artifactregistry_v1beta2.types.tag import GetTagRequest -from google.cloud.artifactregistry_v1beta2.types.tag import ListTagsRequest -from google.cloud.artifactregistry_v1beta2.types.tag import ListTagsResponse -from google.cloud.artifactregistry_v1beta2.types.tag import Tag -from google.cloud.artifactregistry_v1beta2.types.tag import UpdateTagRequest -from google.cloud.artifactregistry_v1beta2.types.version import DeleteVersionRequest -from google.cloud.artifactregistry_v1beta2.types.version import GetVersionRequest -from google.cloud.artifactregistry_v1beta2.types.version import ListVersionsRequest -from google.cloud.artifactregistry_v1beta2.types.version import ListVersionsResponse -from google.cloud.artifactregistry_v1beta2.types.version import Version -from google.cloud.artifactregistry_v1beta2.types.version import VersionView +from google.cloud.artifactregistry_v1.types.artifact import DockerImage +from google.cloud.artifactregistry_v1.types.artifact import ListDockerImagesRequest +from google.cloud.artifactregistry_v1.types.artifact import ListDockerImagesResponse +from google.cloud.artifactregistry_v1.types.repository import GetRepositoryRequest +from google.cloud.artifactregistry_v1.types.repository import ListRepositoriesRequest +from google.cloud.artifactregistry_v1.types.repository import ListRepositoriesResponse +from google.cloud.artifactregistry_v1.types.repository import Repository __all__ = ( "ArtifactRegistryClient", "ArtifactRegistryAsyncClient", - "File", - "GetFileRequest", - "Hash", - "ListFilesRequest", - 
"ListFilesResponse", - "DeletePackageRequest", - "GetPackageRequest", - "ListPackagesRequest", - "ListPackagesResponse", - "Package", - "CreateRepositoryRequest", - "DeleteRepositoryRequest", + "DockerImage", + "ListDockerImagesRequest", + "ListDockerImagesResponse", "GetRepositoryRequest", "ListRepositoriesRequest", "ListRepositoriesResponse", "Repository", - "UpdateRepositoryRequest", - "OperationMetadata", - "CreateTagRequest", - "DeleteTagRequest", - "GetTagRequest", - "ListTagsRequest", - "ListTagsResponse", - "Tag", - "UpdateTagRequest", - "DeleteVersionRequest", - "GetVersionRequest", - "ListVersionsRequest", - "ListVersionsResponse", - "Version", - "VersionView", ) diff --git a/google/cloud/artifactregistry_v1/__init__.py b/google/cloud/artifactregistry_v1/__init__.py new file mode 100644 index 0000000..d9f8179 --- /dev/null +++ b/google/cloud/artifactregistry_v1/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.artifact_registry import ArtifactRegistryClient +from .services.artifact_registry import ArtifactRegistryAsyncClient + +from .types.artifact import DockerImage +from .types.artifact import ListDockerImagesRequest +from .types.artifact import ListDockerImagesResponse +from .types.repository import GetRepositoryRequest +from .types.repository import ListRepositoriesRequest +from .types.repository import ListRepositoriesResponse +from .types.repository import Repository + +__all__ = ( + "ArtifactRegistryAsyncClient", + "ArtifactRegistryClient", + "DockerImage", + "GetRepositoryRequest", + "ListDockerImagesRequest", + "ListDockerImagesResponse", + "ListRepositoriesRequest", + "ListRepositoriesResponse", + "Repository", +) diff --git a/google/cloud/artifactregistry_v1/gapic_metadata.json b/google/cloud/artifactregistry_v1/gapic_metadata.json new file mode 100644 index 0000000..3f25998 --- /dev/null +++ b/google/cloud/artifactregistry_v1/gapic_metadata.json @@ -0,0 +1,53 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.artifactregistry_v1", + "protoPackage": "google.devtools.artifactregistry.v1", + "schema": "1.0", + "services": { + "ArtifactRegistry": { + "clients": { + "grpc": { + "libraryClient": "ArtifactRegistryClient", + "rpcs": { + "GetRepository": { + "methods": [ + "get_repository" + ] + }, + "ListDockerImages": { + "methods": [ + "list_docker_images" + ] + }, + "ListRepositories": { + "methods": [ + "list_repositories" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ArtifactRegistryAsyncClient", + "rpcs": { + "GetRepository": { + "methods": [ + "get_repository" + ] + }, + "ListDockerImages": { + "methods": [ + "list_docker_images" + ] + }, + "ListRepositories": { + "methods": [ + "list_repositories" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/artifactregistry_v1/py.typed 
b/google/cloud/artifactregistry_v1/py.typed new file mode 100644 index 0000000..7ee2398 --- /dev/null +++ b/google/cloud/artifactregistry_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-artifact-registry package uses inline types. diff --git a/google/cloud/artifactregistry_v1/services/__init__.py b/google/cloud/artifactregistry_v1/services/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/google/cloud/artifactregistry_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/google/cloud/artifactregistry_v1/services/artifact_registry/__init__.py b/google/cloud/artifactregistry_v1/services/artifact_registry/__init__.py new file mode 100644 index 0000000..ddff449 --- /dev/null +++ b/google/cloud/artifactregistry_v1/services/artifact_registry/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ArtifactRegistryClient +from .async_client import ArtifactRegistryAsyncClient + +__all__ = ( + "ArtifactRegistryClient", + "ArtifactRegistryAsyncClient", +) diff --git a/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py b/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py new file mode 100644 index 0000000..6fe8cff --- /dev/null +++ b/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py @@ -0,0 +1,425 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.artifactregistry_v1.services.artifact_registry import pagers +from google.cloud.artifactregistry_v1.types import artifact +from google.cloud.artifactregistry_v1.types import repository +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ArtifactRegistryTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ArtifactRegistryGrpcAsyncIOTransport +from .client import ArtifactRegistryClient + + +class ArtifactRegistryAsyncClient: + """The Artifact Registry API service. + + Artifact Registry is an artifact management system for storing + artifacts from different package management systems. + + The resources managed by this API are: + + - Repositories, which group packages and their data. + - Packages, which group versions and their tags. + - Versions, which are specific forms of a package. + - Tags, which represent alternative names for versions. + - Files, which contain content and are optionally associated with a + Package or Version. 
+ """ + + _client: ArtifactRegistryClient + + DEFAULT_ENDPOINT = ArtifactRegistryClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ArtifactRegistryClient.DEFAULT_MTLS_ENDPOINT + + docker_image_path = staticmethod(ArtifactRegistryClient.docker_image_path) + parse_docker_image_path = staticmethod( + ArtifactRegistryClient.parse_docker_image_path + ) + repository_path = staticmethod(ArtifactRegistryClient.repository_path) + parse_repository_path = staticmethod(ArtifactRegistryClient.parse_repository_path) + common_billing_account_path = staticmethod( + ArtifactRegistryClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ArtifactRegistryClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ArtifactRegistryClient.common_folder_path) + parse_common_folder_path = staticmethod( + ArtifactRegistryClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ArtifactRegistryClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ArtifactRegistryClient.parse_common_organization_path + ) + common_project_path = staticmethod(ArtifactRegistryClient.common_project_path) + parse_common_project_path = staticmethod( + ArtifactRegistryClient.parse_common_project_path + ) + common_location_path = staticmethod(ArtifactRegistryClient.common_location_path) + parse_common_location_path = staticmethod( + ArtifactRegistryClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ArtifactRegistryAsyncClient: The constructed client. 
+ """ + return ArtifactRegistryClient.from_service_account_info.__func__(ArtifactRegistryAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ArtifactRegistryAsyncClient: The constructed client. + """ + return ArtifactRegistryClient.from_service_account_file.__func__(ArtifactRegistryAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ArtifactRegistryTransport: + """Returns the transport used by the client instance. + + Returns: + ArtifactRegistryTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ArtifactRegistryClient).get_transport_class, type(ArtifactRegistryClient) + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, ArtifactRegistryTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the artifact registry client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ArtifactRegistryTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ArtifactRegistryClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_docker_images( + self, + request: artifact.ListDockerImagesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDockerImagesAsyncPager: + r"""Lists docker images. + + Args: + request (:class:`google.cloud.artifactregistry_v1.types.ListDockerImagesRequest`): + The request object. The request to list docker images. + parent (:class:`str`): + Required. The name of the parent + resource whose docker images will be + listed. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListDockerImagesAsyncPager: + The response from listing docker + images. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = artifact.ListDockerImagesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_docker_images, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDockerImagesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_repositories( + self, + request: repository.ListRepositoriesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRepositoriesAsyncPager: + r"""Lists repositories. + + Args: + request (:class:`google.cloud.artifactregistry_v1.types.ListRepositoriesRequest`): + The request object. The request to list repositories. + parent (:class:`str`): + Required. The name of the parent + resource whose repositories will be + listed. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListRepositoriesAsyncPager: + The response from listing + repositories. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = repository.ListRepositoriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_repositories, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListRepositoriesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_repository( + self, + request: repository.GetRepositoryRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> repository.Repository: + r"""Gets a repository. + + Args: + request (:class:`google.cloud.artifactregistry_v1.types.GetRepositoryRequest`): + The request object. The request to retrieve a + repository. + name (:class:`str`): + Required. The name of the repository + to retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.types.Repository: + A Repository for storing artifacts + with a specific format. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = repository.GetRepositoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_repository, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-artifact-registry", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("ArtifactRegistryAsyncClient",) diff --git a/google/cloud/artifactregistry_v1/services/artifact_registry/client.py b/google/cloud/artifactregistry_v1/services/artifact_registry/client.py new file mode 100644 index 0000000..c90fdb6 --- /dev/null +++ b/google/cloud/artifactregistry_v1/services/artifact_registry/client.py @@ -0,0 +1,631 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.artifactregistry_v1.services.artifact_registry import pagers +from google.cloud.artifactregistry_v1.types import artifact +from google.cloud.artifactregistry_v1.types import repository +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ArtifactRegistryTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import ArtifactRegistryGrpcTransport +from .transports.grpc_asyncio import ArtifactRegistryGrpcAsyncIOTransport + + +class ArtifactRegistryClientMeta(type): + """Metaclass for the ArtifactRegistry client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ArtifactRegistryTransport]] + _transport_registry["grpc"] = ArtifactRegistryGrpcTransport + _transport_registry["grpc_asyncio"] = ArtifactRegistryGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[ArtifactRegistryTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ArtifactRegistryClient(metaclass=ArtifactRegistryClientMeta): + """The Artifact Registry API service. + + Artifact Registry is an artifact management system for storing + artifacts from different package management systems. + + The resources managed by this API are: + + - Repositories, which group packages and their data. + - Packages, which group versions and their tags. + - Versions, which are specific forms of a package. + - Tags, which represent alternative names for versions. + - Files, which contain content and are optionally associated with a + Package or Version. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "artifactregistry.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ArtifactRegistryClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ArtifactRegistryClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ArtifactRegistryTransport: + """Returns the transport used by the client instance. + + Returns: + ArtifactRegistryTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def docker_image_path( + project: str, location: str, repository: str, docker_image: str, + ) -> str: + """Returns a fully-qualified docker_image string.""" + return "projects/{project}/locations/{location}/repositories/{repository}/dockerImages/{docker_image}".format( + project=project, + location=location, + repository=repository, + docker_image=docker_image, + ) + + @staticmethod + def parse_docker_image_path(path: str) -> Dict[str, str]: + """Parses a docker_image path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/repositories/(?P.+?)/dockerImages/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def repository_path(project: str, location: str, repository: str,) -> str: + """Returns a fully-qualified repository string.""" + return "projects/{project}/locations/{location}/repositories/{repository}".format( + project=project, location=location, repository=repository, + ) + + @staticmethod + def parse_repository_path(path: str) -> Dict[str, str]: + """Parses a repository path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/repositories/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + 
"""Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ArtifactRegistryTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the artifact registry client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ArtifactRegistryTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ArtifactRegistryTransport): + # transport is a ArtifactRegistryTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def list_docker_images( + self, + request: Union[artifact.ListDockerImagesRequest, dict] = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDockerImagesPager: + r"""Lists docker images. + + Args: + request (Union[google.cloud.artifactregistry_v1.types.ListDockerImagesRequest, dict]): + The request object. The request to list docker images. + parent (str): + Required. The name of the parent + resource whose docker images will be + listed. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListDockerImagesPager: + The response from listing docker + images. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a artifact.ListDockerImagesRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, artifact.ListDockerImagesRequest):
+            request = artifact.ListDockerImagesRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_docker_images]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListDockerImagesPager(
+            method=rpc, request=request, response=response, metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_repositories(
+        self,
+        request: Union[repository.ListRepositoriesRequest, dict] = None,
+        *,
+        parent: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListRepositoriesPager:
+        r"""Lists repositories.
+
+        Args:
+            request (Union[google.cloud.artifactregistry_v1.types.ListRepositoriesRequest, dict]):
+                The request object. The request to list repositories.
+            parent (str):
+                Required. The name of the parent
+                resource whose repositories will be
+                listed.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.artifactregistry_v1.services.artifact_registry.pagers.ListRepositoriesPager:
+                The response from listing
+                repositories.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a repository.ListRepositoriesRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, repository.ListRepositoriesRequest):
+            request = repository.ListRepositoriesRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_repositories]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListRepositoriesPager(
+            method=rpc, request=request, response=response, metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_repository(
+        self,
+        request: Union[repository.GetRepositoryRequest, dict] = None,
+        *,
+        name: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> repository.Repository:
+        r"""Gets a repository.
+
+        Args:
+            request (Union[google.cloud.artifactregistry_v1.types.GetRepositoryRequest, dict]):
+                The request object. The request to retrieve a
+                repository.
+            name (str):
+                Required. The name of the repository
+                to retrieve.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.artifactregistry_v1.types.Repository:
+                A Repository for storing artifacts
+                with a specific format.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a repository.GetRepositoryRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, repository.GetRepositoryRequest):
+            request = repository.GetRepositoryRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_repository]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution(
+            "google-cloud-artifact-registry",
+        ).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("ArtifactRegistryClient",)
diff --git a/google/cloud/artifactregistry_v1/services/artifact_registry/pagers.py b/google/cloud/artifactregistry_v1/services/artifact_registry/pagers.py
new file mode 100644
index 0000000..c43a81f
--- /dev/null
+++ b/google/cloud/artifactregistry_v1/services/artifact_registry/pagers.py
@@ -0,0 +1,284 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+    Any,
+    AsyncIterable,
+    Awaitable,
+    Callable,
+    Iterable,
+    Sequence,
+    Tuple,
+    Optional,
+)
+
+from google.cloud.artifactregistry_v1.types import artifact
+from google.cloud.artifactregistry_v1.types import repository
+
+
+class ListDockerImagesPager:
+    """A pager for iterating through ``list_docker_images`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.artifactregistry_v1.types.ListDockerImagesResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``docker_images`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListDockerImages`` requests and continue to iterate
+    through the ``docker_images`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.artifactregistry_v1.types.ListDockerImagesResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+
+    def __init__(
+        self,
+        method: Callable[..., artifact.ListDockerImagesResponse],
+        request: artifact.ListDockerImagesRequest,
+        response: artifact.ListDockerImagesResponse,
+        *,
+        metadata: Sequence[Tuple[str, str]] = ()
+    ):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.artifactregistry_v1.types.ListDockerImagesRequest):
+                The initial request object.
+            response (google.cloud.artifactregistry_v1.types.ListDockerImagesResponse):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = artifact.ListDockerImagesRequest(request)
+        self._response = response
+        self._metadata = metadata  # forwarded on every subsequent page request
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)  # delegate to the latest response
+
+    @property
+    def pages(self) -> Iterable[artifact.ListDockerImagesResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterable[artifact.DockerImage]:
+        for page in self.pages:
+            yield from page.docker_images
+
+    def __repr__(self) -> str:
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListDockerImagesAsyncPager:
+    """A pager for iterating through ``list_docker_images`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.artifactregistry_v1.types.ListDockerImagesResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``docker_images`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListDockerImages`` requests and continue to iterate
+    through the ``docker_images`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.artifactregistry_v1.types.ListDockerImagesResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+
+    def __init__(
+        self,
+        method: Callable[..., Awaitable[artifact.ListDockerImagesResponse]],
+        request: artifact.ListDockerImagesRequest,
+        response: artifact.ListDockerImagesResponse,
+        *,
+        metadata: Sequence[Tuple[str, str]] = ()
+    ):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.artifactregistry_v1.types.ListDockerImagesRequest):
+                The initial request object.
+            response (google.cloud.artifactregistry_v1.types.ListDockerImagesResponse):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = artifact.ListDockerImagesRequest(request)
+        self._response = response
+        self._metadata = metadata  # forwarded on every subsequent page request
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)  # delegate to the latest response
+
+    @property
+    async def pages(self) -> AsyncIterable[artifact.ListDockerImagesResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterable[artifact.DockerImage]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.docker_images:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListRepositoriesPager:
+    """A pager for iterating through ``list_repositories`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.artifactregistry_v1.types.ListRepositoriesResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``repositories`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListRepositories`` requests and continue to iterate
+    through the ``repositories`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.artifactregistry_v1.types.ListRepositoriesResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+
+    def __init__(
+        self,
+        method: Callable[..., repository.ListRepositoriesResponse],
+        request: repository.ListRepositoriesRequest,
+        response: repository.ListRepositoriesResponse,
+        *,
+        metadata: Sequence[Tuple[str, str]] = ()
+    ):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.artifactregistry_v1.types.ListRepositoriesRequest):
+                The initial request object.
+            response (google.cloud.artifactregistry_v1.types.ListRepositoriesResponse):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = repository.ListRepositoriesRequest(request)
+        self._response = response
+        self._metadata = metadata  # forwarded on every subsequent page request
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)  # delegate to the latest response
+
+    @property
+    def pages(self) -> Iterable[repository.ListRepositoriesResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterable[repository.Repository]:
+        for page in self.pages:
+            yield from page.repositories
+
+    def __repr__(self) -> str:
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListRepositoriesAsyncPager:
+    """A pager for iterating through ``list_repositories`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.artifactregistry_v1.types.ListRepositoriesResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``repositories`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListRepositories`` requests and continue to iterate
+    through the ``repositories`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.artifactregistry_v1.types.ListRepositoriesResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+
+    def __init__(
+        self,
+        method: Callable[..., Awaitable[repository.ListRepositoriesResponse]],
+        request: repository.ListRepositoriesRequest,
+        response: repository.ListRepositoriesResponse,
+        *,
+        metadata: Sequence[Tuple[str, str]] = ()
+    ):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.artifactregistry_v1.types.ListRepositoriesRequest):
+                The initial request object.
+            response (google.cloud.artifactregistry_v1.types.ListRepositoriesResponse):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = repository.ListRepositoriesRequest(request)
+        self._response = response
+        self._metadata = metadata  # forwarded on every subsequent page request
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)  # delegate to the latest response
+
+    @property
+    async def pages(self) -> AsyncIterable[repository.ListRepositoriesResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterable[repository.Repository]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.repositories:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/cloud/artifactregistry_v1/services/artifact_registry/transports/__init__.py b/google/cloud/artifactregistry_v1/services/artifact_registry/transports/__init__.py
new file mode 100644
index 0000000..7e4d6be
--- /dev/null
+++ b/google/cloud/artifactregistry_v1/services/artifact_registry/transports/__init__.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import ArtifactRegistryTransport
+from .grpc import ArtifactRegistryGrpcTransport
+from .grpc_asyncio import ArtifactRegistryGrpcAsyncIOTransport
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[ArtifactRegistryTransport]]
+_transport_registry["grpc"] = ArtifactRegistryGrpcTransport
+_transport_registry["grpc_asyncio"] = ArtifactRegistryGrpcAsyncIOTransport
+
+__all__ = (
+    "ArtifactRegistryTransport",
+    "ArtifactRegistryGrpcTransport",
+    "ArtifactRegistryGrpcAsyncIOTransport",
+)
diff --git a/google/cloud/artifactregistry_v1/services/artifact_registry/transports/base.py b/google/cloud/artifactregistry_v1/services/artifact_registry/transports/base.py
new file mode 100644
index 0000000..82b26eb
--- /dev/null
+++ b/google/cloud/artifactregistry_v1/services/artifact_registry/transports/base.py
@@ -0,0 +1,206 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+import packaging.version
+import pkg_resources
+
+import google.auth  # type: ignore
+import google.api_core  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.artifactregistry_v1.types import artifact
+from google.cloud.artifactregistry_v1.types import repository
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution(
+            "google-cloud-artifact-registry",
+        ).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+try:
+    # google.auth.__version__ was added in 1.26.0
+    _GOOGLE_AUTH_VERSION = google.auth.__version__
+except AttributeError:
+    try:  # try pkg_resources if it is available
+        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
+    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
+        _GOOGLE_AUTH_VERSION = None
+
+
+class ArtifactRegistryTransport(abc.ABC):
+    """Abstract transport class for ArtifactRegistry."""
+
+    AUTH_SCOPES = (
+        "https://www.googleapis.com/auth/cloud-platform",
+        "https://www.googleapis.com/auth/cloud-platform.read-only",
+    )
+
+    DEFAULT_HOST: str = "artifactregistry.googleapis.com"
+
+    def __init__(
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+    # TODO(busunkim): This method is in the base transport
+    # to avoid duplicating code across the transport classes. These functions
+    # should be deleted once the minimum required versions of google-auth is increased.
+
+    # TODO: Remove this function once google-auth >= 1.25.0 is required
+    @classmethod
+    def _get_scopes_kwargs(
+        cls, host: str, scopes: Optional[Sequence[str]]
+    ) -> Dict[str, Optional[Sequence[str]]]:
+        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
+
+        scopes_kwargs = {}
+
+        if _GOOGLE_AUTH_VERSION and (
+            packaging.version.parse(_GOOGLE_AUTH_VERSION)
+            >= packaging.version.parse("1.25.0")
+        ):
+            scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+        else:
+            scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+        return scopes_kwargs
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.list_docker_images: gapic_v1.method.wrap_method(
+                self.list_docker_images, default_timeout=None, client_info=client_info,
+            ),
+            self.list_repositories: gapic_v1.method.wrap_method(
+                self.list_repositories, default_timeout=None, client_info=client_info,
+            ),
+            self.get_repository: gapic_v1.method.wrap_method(
+                self.get_repository, default_timeout=None, client_info=client_info,
+            ),
+        }
+
+    @property
+    def list_docker_images(
+        self,
+    ) -> Callable[
+        [artifact.ListDockerImagesRequest],
+        Union[
+            artifact.ListDockerImagesResponse,
+            Awaitable[artifact.ListDockerImagesResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def list_repositories(
+        self,
+    ) -> Callable[
+        [repository.ListRepositoriesRequest],
+        Union[
+            repository.ListRepositoriesResponse,
+            Awaitable[repository.ListRepositoriesResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def get_repository(
+        self,
+    ) -> Callable[
+        [repository.GetRepositoryRequest],
+        Union[repository.Repository, Awaitable[repository.Repository]],
+    ]:
+        raise NotImplementedError()
+
+
+__all__ = ("ArtifactRegistryTransport",)
diff --git a/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc.py b/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc.py
new file mode 100644
index 0000000..f86aaa9
--- /dev/null
+++ b/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc.py
@@ -0,0 +1,324 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.artifactregistry_v1.types import artifact +from google.cloud.artifactregistry_v1.types import repository +from .base import ArtifactRegistryTransport, DEFAULT_CLIENT_INFO + + +class ArtifactRegistryGrpcTransport(ArtifactRegistryTransport): + """gRPC backend transport for ArtifactRegistry. + + The Artifact Registry API service. + + Artifact Registry is an artifact management system for storing + artifacts from different package management systems. + + The resources managed by this API are: + + - Repositories, which group packages and their data. + - Packages, which group versions and their tags. + - Versions, which are specific forms of a package. + - Tags, which represent alternative names for versions. + - Files, which contain content and are optionally associated with a + Package or Version. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "artifactregistry.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "artifactregistry.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def list_docker_images( + self, + ) -> Callable[ + [artifact.ListDockerImagesRequest], artifact.ListDockerImagesResponse + ]: + r"""Return a callable for the list docker images method over gRPC. + + Lists docker images. + + Returns: + Callable[[~.ListDockerImagesRequest], + ~.ListDockerImagesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_docker_images" not in self._stubs: + self._stubs["list_docker_images"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/ListDockerImages", + request_serializer=artifact.ListDockerImagesRequest.serialize, + response_deserializer=artifact.ListDockerImagesResponse.deserialize, + ) + return self._stubs["list_docker_images"] + + @property + def list_repositories( + self, + ) -> Callable[ + [repository.ListRepositoriesRequest], repository.ListRepositoriesResponse + ]: + r"""Return a callable for the list repositories method over gRPC. + + Lists repositories. + + Returns: + Callable[[~.ListRepositoriesRequest], + ~.ListRepositoriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_repositories" not in self._stubs: + self._stubs["list_repositories"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/ListRepositories", + request_serializer=repository.ListRepositoriesRequest.serialize, + response_deserializer=repository.ListRepositoriesResponse.deserialize, + ) + return self._stubs["list_repositories"] + + @property + def get_repository( + self, + ) -> Callable[[repository.GetRepositoryRequest], repository.Repository]: + r"""Return a callable for the get repository method over gRPC. + + Gets a repository. + + Returns: + Callable[[~.GetRepositoryRequest], + ~.Repository]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_repository" not in self._stubs: + self._stubs["get_repository"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/GetRepository", + request_serializer=repository.GetRepositoryRequest.serialize, + response_deserializer=repository.Repository.deserialize, + ) + return self._stubs["get_repository"] + + +__all__ = ("ArtifactRegistryGrpcTransport",) diff --git a/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc_asyncio.py b/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc_asyncio.py new file mode 100644 index 0000000..d7b4f71 --- /dev/null +++ b/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc_asyncio.py @@ -0,0 +1,328 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.artifactregistry_v1.types import artifact +from google.cloud.artifactregistry_v1.types import repository +from .base import ArtifactRegistryTransport, DEFAULT_CLIENT_INFO +from .grpc import ArtifactRegistryGrpcTransport + + +class ArtifactRegistryGrpcAsyncIOTransport(ArtifactRegistryTransport): + """gRPC AsyncIO backend transport for ArtifactRegistry. + + The Artifact Registry API service. + + Artifact Registry is an artifact management system for storing + artifacts from different package management systems. + + The resources managed by this API are: + + - Repositories, which group packages and their data. + - Packages, which group versions and their tags. + - Versions, which are specific forms of a package. + - Tags, which represent alternative names for versions. + - Files, which contain content and are optionally associated with a + Package or Version. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "artifactregistry.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "artifactregistry.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed.
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def list_docker_images( + self, + ) -> Callable[ + [artifact.ListDockerImagesRequest], Awaitable[artifact.ListDockerImagesResponse] + ]: + r"""Return a callable for the list docker images method over gRPC. + + Lists docker images. + + Returns: + Callable[[~.ListDockerImagesRequest], + Awaitable[~.ListDockerImagesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_docker_images" not in self._stubs: + self._stubs["list_docker_images"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/ListDockerImages", + request_serializer=artifact.ListDockerImagesRequest.serialize, + response_deserializer=artifact.ListDockerImagesResponse.deserialize, + ) + return self._stubs["list_docker_images"] + + @property + def list_repositories( + self, + ) -> Callable[ + [repository.ListRepositoriesRequest], + Awaitable[repository.ListRepositoriesResponse], + ]: + r"""Return a callable for the list repositories method over gRPC. + + Lists repositories. + + Returns: + Callable[[~.ListRepositoriesRequest], + Awaitable[~.ListRepositoriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_repositories" not in self._stubs: + self._stubs["list_repositories"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/ListRepositories", + request_serializer=repository.ListRepositoriesRequest.serialize, + response_deserializer=repository.ListRepositoriesResponse.deserialize, + ) + return self._stubs["list_repositories"] + + @property + def get_repository( + self, + ) -> Callable[[repository.GetRepositoryRequest], Awaitable[repository.Repository]]: + r"""Return a callable for the get repository method over gRPC. + + Gets a repository. + + Returns: + Callable[[~.GetRepositoryRequest], + Awaitable[~.Repository]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_repository" not in self._stubs: + self._stubs["get_repository"] = self.grpc_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/GetRepository", + request_serializer=repository.GetRepositoryRequest.serialize, + response_deserializer=repository.Repository.deserialize, + ) + return self._stubs["get_repository"] + + +__all__ = ("ArtifactRegistryGrpcAsyncIOTransport",) diff --git a/google/cloud/artifactregistry_v1/types/__init__.py b/google/cloud/artifactregistry_v1/types/__init__.py new file mode 100644 index 0000000..e226909 --- /dev/null +++ b/google/cloud/artifactregistry_v1/types/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .artifact import ( + DockerImage, + ListDockerImagesRequest, + ListDockerImagesResponse, +) +from .repository import ( + GetRepositoryRequest, + ListRepositoriesRequest, + ListRepositoriesResponse, + Repository, +) + +__all__ = ( + "DockerImage", + "ListDockerImagesRequest", + "ListDockerImagesResponse", + "GetRepositoryRequest", + "ListRepositoriesRequest", + "ListRepositoriesResponse", + "Repository", +) diff --git a/google/cloud/artifactregistry_v1/types/artifact.py b/google/cloud/artifactregistry_v1/types/artifact.py new file mode 100644 index 0000000..994599f --- /dev/null +++ b/google/cloud/artifactregistry_v1/types/artifact.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# -*- coding: utf-8 -*-
# Proto-plus message definitions for the Artifact Registry v1 ``artifact``
# proto: the DockerImage resource and its list request/response pair.
import proto  # type: ignore

from google.protobuf import timestamp_pb2  # type: ignore


__protobuf__ = proto.module(
    package="google.devtools.artifactregistry.v1",
    manifest={
        "DockerImage",
        "ListDockerImagesRequest",
        "ListDockerImagesResponse",
    },
)


class DockerImage(proto.Message):
    r"""A Docker artifact stored in Artifact Registry.

    Several of these fields are also surfaced as untyped, camelCase
    metadata on the Version resource (e.g. ``metadata.imageSizeBytes``):

    -  imageSizeBytes
    -  mediaType
    -  buildTime

    Attributes:
        name (str):
            Required. Unique image name formed from the
            registry_location, project_id, repository_name and image
            id:
            ``projects/<project>/locations/<location>/repository/<repo>/dockerImages/<image>``.
            For example,
            "projects/test-project/locations/us-west4/repositories/test-repo/dockerImages/
            nginx@sha256:e9954c1fc875017be1c3e36eca16be2d9e9bccc4bf072163515467d6a823c7cf",
            where "us-west4" is the registry_location, "test-project"
            is the project_id, "test-repo" is the repository_name and
            the trailing "nginx@sha256:..." component is the image's
            digest.
        uri (str):
            Required. URL to access the image, e.g.
            us-west4-docker.pkg.dev/test-project/test-repo/nginx@sha256:...
        tags (Sequence[str]):
            Tags attached to this image.
        image_size_bytes (int):
            Calculated size of the image; mirrored as the
            'metadata.imageSizeBytes' field of the Version resource.
        upload_time (google.protobuf.timestamp_pb2.Timestamp):
            Time the image was uploaded.
        media_type (str):
            Media type of this image, e.g.
            "application/vnd.docker.distribution.manifest.v2+json";
            mirrored as the 'metadata.mediaType' field of the Version
            resource.
        build_time (google.protobuf.timestamp_pb2.Timestamp):
            The time this image was built; mirrored as the
            'metadata.buildTime' field of the Version resource.
            Returned to the client as an RFC 3339 string, which the
            JavaScript Date constructor accepts directly.
    """

    name = proto.Field(proto.STRING, number=1)
    uri = proto.Field(proto.STRING, number=2)
    tags = proto.RepeatedField(proto.STRING, number=3)
    image_size_bytes = proto.Field(proto.INT64, number=4)
    upload_time = proto.Field(
        proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp
    )
    media_type = proto.Field(proto.STRING, number=6)
    build_time = proto.Field(
        proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp
    )


class ListDockerImagesRequest(proto.Message):
    r"""The request to list docker images.

    Attributes:
        parent (str):
            Required. The name of the parent resource whose docker
            images will be listed.
        page_size (int):
            The maximum number of artifacts to return.
        page_token (str):
            The next_page_token value returned from a previous list
            request, if any.
    """

    parent = proto.Field(proto.STRING, number=1)
    page_size = proto.Field(proto.INT32, number=2)
    page_token = proto.Field(proto.STRING, number=3)


class ListDockerImagesResponse(proto.Message):
    r"""The response from listing docker images.

    Attributes:
        docker_images (Sequence[google.cloud.artifactregistry_v1.types.DockerImage]):
            The docker images returned.
        next_page_token (str):
            The token to retrieve the next page of artifacts, or empty
            if there are no more artifacts to return.
    """

    @property
    def raw_page(self):
        # Lets the GAPIC paginator treat this response as its own page.
        return self

    docker_images = proto.RepeatedField(
        proto.MESSAGE, number=1, message="DockerImage"
    )
    next_page_token = proto.Field(proto.STRING, number=2)


__all__ = tuple(sorted(__protobuf__.manifest))
# -*- coding: utf-8 -*-
# Proto-plus message definitions for the Artifact Registry v1
# ``repository`` proto: the Repository resource plus its list/get
# request and response messages.
import proto  # type: ignore

from google.protobuf import timestamp_pb2  # type: ignore


__protobuf__ = proto.module(
    package="google.devtools.artifactregistry.v1",
    manifest={
        "Repository",
        "ListRepositoriesRequest",
        "ListRepositoriesResponse",
        "GetRepositoryRequest",
    },
)


class Repository(proto.Message):
    r"""A Repository for storing artifacts with a specific format.

    Attributes:
        name (str):
            The name of the repository, for example:
            "projects/p1/locations/us-central1/repositories/repo1".
        format_ (google.cloud.artifactregistry_v1.types.Repository.Format):
            The format of packages that are stored in the repository.
        description (str):
            The user-provided description of the repository.
        labels (Sequence[google.cloud.artifactregistry_v1.types.Repository.LabelsEntry]):
            Labels with user-defined metadata. This field may contain
            up to 64 entries. Label keys and values may be no longer
            than 63 characters. Label keys must begin with a lowercase
            letter and may only contain lowercase letters, numeric
            characters, underscores, and dashes.
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            The time when the repository was created.
        update_time (google.protobuf.timestamp_pb2.Timestamp):
            The time when the repository was last updated.
        kms_key_name (str):
            The Cloud KMS resource name of the customer managed
            encryption key that’s used to encrypt the contents of the
            Repository. Has the form:
            ``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``.
            This value may not be changed after the Repository has been
            created.
    """

    class Format(proto.Enum):
        r"""A package format."""
        FORMAT_UNSPECIFIED = 0
        DOCKER = 1
        MAVEN = 2
        NPM = 3
        # NOTE: 4 and 7 are intentionally unassigned in the proto.
        APT = 5
        YUM = 6
        PYTHON = 8

    name = proto.Field(proto.STRING, number=1)
    format_ = proto.Field(proto.ENUM, number=2, enum=Format)
    description = proto.Field(proto.STRING, number=3)
    labels = proto.MapField(proto.STRING, proto.STRING, number=4)
    create_time = proto.Field(
        proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp
    )
    update_time = proto.Field(
        proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp
    )
    kms_key_name = proto.Field(proto.STRING, number=8)


class ListRepositoriesRequest(proto.Message):
    r"""The request to list repositories.

    Attributes:
        parent (str):
            Required. The name of the parent resource whose
            repositories will be listed.
        page_size (int):
            The maximum number of repositories to return.
        page_token (str):
            The next_page_token value returned from a previous list
            request, if any.
    """

    parent = proto.Field(proto.STRING, number=1)
    page_size = proto.Field(proto.INT32, number=2)
    page_token = proto.Field(proto.STRING, number=3)


class ListRepositoriesResponse(proto.Message):
    r"""The response from listing repositories.

    Attributes:
        repositories (Sequence[google.cloud.artifactregistry_v1.types.Repository]):
            The repositories returned.
        next_page_token (str):
            The token to retrieve the next page of repositories, or
            empty if there are no more repositories to return.
    """

    @property
    def raw_page(self):
        # Lets the GAPIC paginator treat this response as its own page.
        return self

    repositories = proto.RepeatedField(
        proto.MESSAGE, number=1, message="Repository"
    )
    next_page_token = proto.Field(proto.STRING, number=2)


class GetRepositoryRequest(proto.Message):
    r"""The request to retrieve a repository.

    Attributes:
        name (str):
            Required. The name of the repository to retrieve.
    """

    name = proto.Field(proto.STRING, number=1)


__all__ = tuple(sorted(__protobuf__.manifest))
# -*- coding: utf-8 -*-
# ``service.py`` exists for parity with the other generated ``types``
# modules; the v1 surface declares no service-level messages, so the
# manifest is empty and this module exports nothing.
#
# FIX: ``import proto`` was missing — without it the ``proto.module(...)``
# call below raises NameError as soon as this module is imported.
import proto  # type: ignore


__protobuf__ = proto.module(
    package="google.devtools.artifactregistry.v1",
    manifest={},
)


__all__ = tuple(sorted(__protobuf__.manifest))
name (str): @@ -545,7 +545,7 @@ def get_repository( def create_repository( self, - request: gda_repository.CreateRepositoryRequest = None, + request: Union[gda_repository.CreateRepositoryRequest, dict] = None, *, parent: str = None, repository: gda_repository.Repository = None, @@ -559,7 +559,7 @@ def create_repository( response will be the created Repository. Args: - request (google.cloud.artifactregistry_v1beta2.types.CreateRepositoryRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.CreateRepositoryRequest, dict]): The request object. The request to create a new repository. parent (str): @@ -648,7 +648,7 @@ def create_repository( def update_repository( self, - request: gda_repository.UpdateRepositoryRequest = None, + request: Union[gda_repository.UpdateRepositoryRequest, dict] = None, *, repository: gda_repository.Repository = None, update_mask: field_mask_pb2.FieldMask = None, @@ -659,7 +659,7 @@ def update_repository( r"""Updates a repository. Args: - request (google.cloud.artifactregistry_v1beta2.types.UpdateRepositoryRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.UpdateRepositoryRequest, dict]): The request object. The request to update a repository. repository (google.cloud.artifactregistry_v1beta2.types.Repository): The repository that replaces the @@ -731,7 +731,7 @@ def update_repository( def delete_repository( self, - request: repository.DeleteRepositoryRequest = None, + request: Union[repository.DeleteRepositoryRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -744,7 +744,7 @@ def delete_repository( and will return a google.protobuf.Empty response. Args: - request (google.cloud.artifactregistry_v1beta2.types.DeleteRepositoryRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.DeleteRepositoryRequest, dict]): The request object. The request to delete a repository. name (str): The name of the repository to delete. 
@@ -823,7 +823,7 @@ def delete_repository( def list_packages( self, - request: package.ListPackagesRequest = None, + request: Union[package.ListPackagesRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -833,7 +833,7 @@ def list_packages( r"""Lists packages. Args: - request (google.cloud.artifactregistry_v1beta2.types.ListPackagesRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.ListPackagesRequest, dict]): The request object. The request to list packages. parent (str): The name of the parent resource whose @@ -901,7 +901,7 @@ def list_packages( def get_package( self, - request: package.GetPackageRequest = None, + request: Union[package.GetPackageRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -911,7 +911,7 @@ def get_package( r"""Gets a package. Args: - request (google.cloud.artifactregistry_v1beta2.types.GetPackageRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.GetPackageRequest, dict]): The request object. The request to retrieve a package. name (str): The name of the package to retrieve. @@ -969,7 +969,7 @@ def get_package( def delete_package( self, - request: package.DeletePackageRequest = None, + request: Union[package.DeletePackageRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -981,7 +981,7 @@ def delete_package( has been deleted. Args: - request (google.cloud.artifactregistry_v1beta2.types.DeletePackageRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.DeletePackageRequest, dict]): The request object. The request to delete a package. name (str): The name of the package to delete. 
@@ -1060,7 +1060,7 @@ def delete_package( def list_versions( self, - request: version.ListVersionsRequest = None, + request: Union[version.ListVersionsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1070,7 +1070,7 @@ def list_versions( r"""Lists versions. Args: - request (google.cloud.artifactregistry_v1beta2.types.ListVersionsRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.ListVersionsRequest, dict]): The request object. The request to list versions. parent (str): The name of the parent resource whose @@ -1138,7 +1138,7 @@ def list_versions( def get_version( self, - request: version.GetVersionRequest = None, + request: Union[version.GetVersionRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1148,7 +1148,7 @@ def get_version( r"""Gets a version Args: - request (google.cloud.artifactregistry_v1beta2.types.GetVersionRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.GetVersionRequest, dict]): The request object. The request to retrieve a version. name (str): The name of the version to retrieve. @@ -1209,7 +1209,7 @@ def get_version( def delete_version( self, - request: version.DeleteVersionRequest = None, + request: Union[version.DeleteVersionRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1221,7 +1221,7 @@ def delete_version( been deleted. Args: - request (google.cloud.artifactregistry_v1beta2.types.DeleteVersionRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.DeleteVersionRequest, dict]): The request object. The request to delete a version. name (str): The name of the version to delete. 
@@ -1300,7 +1300,7 @@ def delete_version( def list_files( self, - request: file.ListFilesRequest = None, + request: Union[file.ListFilesRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1310,7 +1310,7 @@ def list_files( r"""Lists files. Args: - request (google.cloud.artifactregistry_v1beta2.types.ListFilesRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.ListFilesRequest, dict]): The request object. The request to list files. parent (str): The name of the parent resource whose @@ -1378,7 +1378,7 @@ def list_files( def get_file( self, - request: file.GetFileRequest = None, + request: Union[file.GetFileRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1388,7 +1388,7 @@ def get_file( r"""Gets a file. Args: - request (google.cloud.artifactregistry_v1beta2.types.GetFileRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.GetFileRequest, dict]): The request object. The request to retrieve a file. name (str): The name of the file to retrieve. @@ -1447,7 +1447,7 @@ def get_file( def list_tags( self, - request: tag.ListTagsRequest = None, + request: Union[tag.ListTagsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1457,7 +1457,7 @@ def list_tags( r"""Lists tags. Args: - request (google.cloud.artifactregistry_v1beta2.types.ListTagsRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.ListTagsRequest, dict]): The request object. The request to list tags. parent (str): The name of the parent resource whose @@ -1525,7 +1525,7 @@ def list_tags( def get_tag( self, - request: tag.GetTagRequest = None, + request: Union[tag.GetTagRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1535,7 +1535,7 @@ def get_tag( r"""Gets a tag. 
Args: - request (google.cloud.artifactregistry_v1beta2.types.GetTagRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.GetTagRequest, dict]): The request object. The request to retrieve a tag. name (str): The name of the tag to retrieve. @@ -1594,7 +1594,7 @@ def get_tag( def create_tag( self, - request: gda_tag.CreateTagRequest = None, + request: Union[gda_tag.CreateTagRequest, dict] = None, *, parent: str = None, tag: gda_tag.Tag = None, @@ -1606,7 +1606,7 @@ def create_tag( r"""Creates a tag. Args: - request (google.cloud.artifactregistry_v1beta2.types.CreateTagRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.CreateTagRequest, dict]): The request object. The request to create a new tag. parent (str): The name of the parent resource where @@ -1683,7 +1683,7 @@ def create_tag( def update_tag( self, - request: gda_tag.UpdateTagRequest = None, + request: Union[gda_tag.UpdateTagRequest, dict] = None, *, tag: gda_tag.Tag = None, update_mask: field_mask_pb2.FieldMask = None, @@ -1694,7 +1694,7 @@ def update_tag( r"""Updates a tag. Args: - request (google.cloud.artifactregistry_v1beta2.types.UpdateTagRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.UpdateTagRequest, dict]): The request object. The request to create or update a tag. tag (google.cloud.artifactregistry_v1beta2.types.Tag): @@ -1766,7 +1766,7 @@ def update_tag( def delete_tag( self, - request: tag.DeleteTagRequest = None, + request: Union[tag.DeleteTagRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1776,7 +1776,7 @@ def delete_tag( r"""Deletes a tag. Args: - request (google.cloud.artifactregistry_v1beta2.types.DeleteTagRequest): + request (Union[google.cloud.artifactregistry_v1beta2.types.DeleteTagRequest, dict]): The request object. The request to delete a tag. name (str): The name of the tag to delete. 
@@ -1827,7 +1827,7 @@ def delete_tag( def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -1836,7 +1836,7 @@ def set_iam_policy( r"""Updates the IAM policy for a given resource. Args: - request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): The request object. Request message for `SetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1931,7 +1931,7 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -1940,7 +1940,7 @@ def get_iam_policy( r"""Gets the IAM policy for a given resource. Args: - request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): The request object. Request message for `GetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2035,7 +2035,7 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -2045,7 +2045,7 @@ def test_iam_permissions( resource. Args: - request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): The request object. Request message for `TestIamPermissions` method. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, diff --git a/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/base.py b/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/base.py index 8b3eef6..9ffcd1d 100644 --- a/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/base.py +++ b/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/base.py @@ -131,7 +131,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) diff --git a/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/grpc.py b/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/grpc.py index b1ac1de..de44d13 100644 --- a/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/grpc.py +++ b/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/grpc.py @@ -103,16 +103,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. 
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/grpc_asyncio.py b/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/grpc_asyncio.py index d2a405a..e389c4a 100644 --- a/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/grpc_asyncio.py +++ b/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/grpc_asyncio.py @@ -150,16 +150,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/google/cloud/artifactregistry_v1beta2/types/package.py b/google/cloud/artifactregistry_v1beta2/types/package.py index 6a89202..fe6f5f7 100644 --- a/google/cloud/artifactregistry_v1beta2/types/package.py +++ b/google/cloud/artifactregistry_v1beta2/types/package.py @@ -32,6 +32,7 @@ class Package(proto.Message): r"""Packages are named collections of versions. + Attributes: name (str): The name of the package, for example: @@ -55,6 +56,7 @@ class Package(proto.Message): class ListPackagesRequest(proto.Message): r"""The request to list packages. + Attributes: parent (str): The name of the parent resource whose @@ -74,6 +76,7 @@ class ListPackagesRequest(proto.Message): class ListPackagesResponse(proto.Message): r"""The response from listing packages. + Attributes: packages (Sequence[google.cloud.artifactregistry_v1beta2.types.Package]): The packages returned. @@ -93,6 +96,7 @@ def raw_page(self): class GetPackageRequest(proto.Message): r"""The request to retrieve a package. + Attributes: name (str): The name of the package to retrieve. @@ -103,6 +107,7 @@ class GetPackageRequest(proto.Message): class DeletePackageRequest(proto.Message): r"""The request to delete a package. + Attributes: name (str): The name of the package to delete. diff --git a/google/cloud/artifactregistry_v1beta2/types/repository.py b/google/cloud/artifactregistry_v1beta2/types/repository.py index ce69609..09621c8 100644 --- a/google/cloud/artifactregistry_v1beta2/types/repository.py +++ b/google/cloud/artifactregistry_v1beta2/types/repository.py @@ -35,6 +35,7 @@ class Repository(proto.Message): r"""A Repository for storing artifacts with a specific format. + Attributes: name (str): The name of the repository, for example: @@ -84,6 +85,7 @@ class Format(proto.Enum): class ListRepositoriesRequest(proto.Message): r"""The request to list repositories. 
+ Attributes: parent (str): The name of the parent resource whose @@ -103,6 +105,7 @@ class ListRepositoriesRequest(proto.Message): class ListRepositoriesResponse(proto.Message): r"""The response from listing repositories. + Attributes: repositories (Sequence[google.cloud.artifactregistry_v1beta2.types.Repository]): The repositories returned. @@ -122,6 +125,7 @@ def raw_page(self): class GetRepositoryRequest(proto.Message): r"""The request to retrieve a repository. + Attributes: name (str): The name of the repository to retrieve. @@ -132,6 +136,7 @@ class GetRepositoryRequest(proto.Message): class CreateRepositoryRequest(proto.Message): r"""The request to create a new repository. + Attributes: parent (str): The name of the parent resource where the @@ -149,6 +154,7 @@ class CreateRepositoryRequest(proto.Message): class UpdateRepositoryRequest(proto.Message): r"""The request to update a repository. + Attributes: repository (google.cloud.artifactregistry_v1beta2.types.Repository): The repository that replaces the resource on @@ -167,6 +173,7 @@ class UpdateRepositoryRequest(proto.Message): class DeleteRepositoryRequest(proto.Message): r"""The request to delete a repository. + Attributes: name (str): The name of the repository to delete. diff --git a/google/cloud/artifactregistry_v1beta2/types/tag.py b/google/cloud/artifactregistry_v1beta2/types/tag.py index a3a08af..676e97b 100644 --- a/google/cloud/artifactregistry_v1beta2/types/tag.py +++ b/google/cloud/artifactregistry_v1beta2/types/tag.py @@ -53,6 +53,7 @@ class Tag(proto.Message): class ListTagsRequest(proto.Message): r"""The request to list tags. + Attributes: parent (str): The name of the parent resource whose tags @@ -85,6 +86,7 @@ class ListTagsRequest(proto.Message): class ListTagsResponse(proto.Message): r"""The response from listing tags. + Attributes: tags (Sequence[google.cloud.artifactregistry_v1beta2.types.Tag]): The tags returned. 
@@ -103,6 +105,7 @@ def raw_page(self): class GetTagRequest(proto.Message): r"""The request to retrieve a tag. + Attributes: name (str): The name of the tag to retrieve. @@ -113,6 +116,7 @@ class GetTagRequest(proto.Message): class CreateTagRequest(proto.Message): r"""The request to create a new tag. + Attributes: parent (str): The name of the parent resource where the tag @@ -130,6 +134,7 @@ class CreateTagRequest(proto.Message): class UpdateTagRequest(proto.Message): r"""The request to create or update a tag. + Attributes: tag (google.cloud.artifactregistry_v1beta2.types.Tag): The tag that replaces the resource on the @@ -148,6 +153,7 @@ class UpdateTagRequest(proto.Message): class DeleteTagRequest(proto.Message): r"""The request to delete a tag. + Attributes: name (str): The name of the tag to delete. diff --git a/google/cloud/artifactregistry_v1beta2/types/version.py b/google/cloud/artifactregistry_v1beta2/types/version.py index 791c101..c086781 100644 --- a/google/cloud/artifactregistry_v1beta2/types/version.py +++ b/google/cloud/artifactregistry_v1beta2/types/version.py @@ -73,6 +73,7 @@ class Version(proto.Message): class ListVersionsRequest(proto.Message): r"""The request to list versions. + Attributes: parent (str): The name of the parent resource whose @@ -96,6 +97,7 @@ class ListVersionsRequest(proto.Message): class ListVersionsResponse(proto.Message): r"""The response from listing versions. + Attributes: versions (Sequence[google.cloud.artifactregistry_v1beta2.types.Version]): The versions returned. @@ -115,6 +117,7 @@ def raw_page(self): class GetVersionRequest(proto.Message): r"""The request to retrieve a version. + Attributes: name (str): The name of the version to retrieve. @@ -129,6 +132,7 @@ class GetVersionRequest(proto.Message): class DeleteVersionRequest(proto.Message): r"""The request to delete a version. + Attributes: name (str): The name of the version to delete. 
diff --git a/owlbot.py b/owlbot.py index aa11514..45b9158 100644 --- a/owlbot.py +++ b/owlbot.py @@ -22,17 +22,23 @@ common = gcp.CommonTemplates() -default_version = "v1beta2" +default_version = "v1" for library in s.get_staging_dirs(default_version): # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 - s.replace(library / f"google/cloud/artifactregistry_{library.name}/types/file.py", + s.replace(library / f"google/cloud/artifactregistry_{library.name}/types/*.py", r""". Attributes:""", r""".\n Attributes:""", ) + # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/998 + s.replace(library / f"google/cloud/artifactregistry_{library.name}/types/artifact.py", + r""": - """, + r""":\n\n - """, + ) + s.move(library, excludes=["nox.py", "setup.py", "README.rst", "docs/index.rst"]) s.remove_staging_dirs() diff --git a/scripts/fixup_artifactregistry_v1_keywords.py b/scripts/fixup_artifactregistry_v1_keywords.py new file mode 100644 index 0000000..8c28ead --- /dev/null +++ b/scripts/fixup_artifactregistry_v1_keywords.py @@ -0,0 +1,178 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
# Rewrites code that calls the artifactregistry v1 client with flattened
# positional/keyword arguments into the single ``request={...}`` dict
# form, copying every ``.py`` file from an input tree to an output tree.
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Splits *iterator* into ``(true_list, false_list)`` by *predicate*,
    preserving the relative order of elements within each list.
    """
    results = ([], [])

    # bool -> int maps falsy results to index 0 and truthy to index 1.
    for i in iterator:
        results[int(predicate(i))].append(i)

    # Returns trueList, falseList
    return results[1], results[0]


class artifactregistryCallTransformer(cst.CSTTransformer):
    # Per-call control parameters that must stay as keyword arguments
    # rather than being folded into the request dict.
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    # Flattened parameter order for each API method this tool rewrites;
    # positional args are matched against these names in order.
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        'get_repository': ('name', ),
        'list_docker_images': ('parent', 'page_size', 'page_token', ),
        'list_repositories': ('parent', 'page_size', 'page_token', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite ``client.method(a, b, retry=...)`` into
        ``client.method(request={'x': a, 'y': b}, retry=...)``.
        """
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        # Separate the request-bound kwargs from the control kwargs.
        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the flattened parameters must have been
        # control params passed positionally; rebind them by keyword.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        # Build the single ``request={...}`` dict argument.
        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=artifactregistryCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    # Lazily walk the tree, keeping only Python sources.
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        with open(fpath, 'r') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w') as f:
            f.write(updated.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the artifactregistry client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    # Validate both directories before doing any work.
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    # Refuse to write into a non-empty output directory (precondition of
    # fix_files, and avoids silently clobbering prior runs).
    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
'delete_package': ('name', ), - 'delete_repository': ('name', ), - 'delete_tag': ('name', ), - 'delete_version': ('name', 'force', ), - 'get_file': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_package': ('name', ), - 'get_repository': ('name', ), - 'get_tag': ('name', ), - 'get_version': ('name', 'view', ), - 'list_files': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_packages': ('parent', 'page_size', 'page_token', ), - 'list_repositories': ('parent', 'page_size', 'page_token', ), - 'list_tags': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_versions': ('parent', 'page_size', 'page_token', 'view', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_repository': ('repository', 'update_mask', ), - 'update_tag': ('tag', 'update_mask', ), + 'create_repository': ('parent', 'repository_id', 'repository', ), + 'create_tag': ('parent', 'tag_id', 'tag', ), + 'delete_package': ('name', ), + 'delete_repository': ('name', ), + 'delete_tag': ('name', ), + 'delete_version': ('name', 'force', ), + 'get_file': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_package': ('name', ), + 'get_repository': ('name', ), + 'get_tag': ('name', ), + 'get_version': ('name', 'view', ), + 'list_files': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_packages': ('parent', 'page_size', 'page_token', ), + 'list_repositories': ('parent', 'page_size', 'page_token', ), + 'list_tags': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_versions': ('parent', 'page_size', 'page_token', 'view', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_repository': ('repository', 'update_mask', ), + 'update_tag': ('tag', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -78,7 +78,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> 
cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) diff --git a/tests/unit/gapic/artifactregistry_v1/__init__.py b/tests/unit/gapic/artifactregistry_v1/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/gapic/artifactregistry_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py b/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py new file mode 100644 index 0000000..9ef2cc1 --- /dev/null +++ b/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py @@ -0,0 +1,2116 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.artifactregistry_v1.services.artifact_registry import ( + ArtifactRegistryAsyncClient, +) +from google.cloud.artifactregistry_v1.services.artifact_registry import ( + ArtifactRegistryClient, +) +from google.cloud.artifactregistry_v1.services.artifact_registry import pagers +from google.cloud.artifactregistry_v1.services.artifact_registry import transports +from google.cloud.artifactregistry_v1.services.artifact_registry.transports.base import ( + _GOOGLE_AUTH_VERSION, +) +from google.cloud.artifactregistry_v1.types import artifact +from google.cloud.artifactregistry_v1.types import repository +from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ArtifactRegistryClient._get_default_mtls_endpoint(None) is None + assert ( + ArtifactRegistryClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ArtifactRegistryClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ArtifactRegistryClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ArtifactRegistryClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ArtifactRegistryClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [ArtifactRegistryClient, ArtifactRegistryAsyncClient,] +) +def test_artifact_registry_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with 
mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "artifactregistry.googleapis.com:443" + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ArtifactRegistryGrpcTransport, "grpc"), + (transports.ArtifactRegistryGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_artifact_registry_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class", [ArtifactRegistryClient, ArtifactRegistryAsyncClient,] +) +def test_artifact_registry_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 
"artifactregistry.googleapis.com:443" + + +def test_artifact_registry_client_get_transport_class(): + transport = ArtifactRegistryClient.get_transport_class() + available_transports = [ + transports.ArtifactRegistryGrpcTransport, + ] + assert transport in available_transports + + transport = ArtifactRegistryClient.get_transport_class("grpc") + assert transport == transports.ArtifactRegistryGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ArtifactRegistryClient, transports.ArtifactRegistryGrpcTransport, "grpc"), + ( + ArtifactRegistryAsyncClient, + transports.ArtifactRegistryGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + ArtifactRegistryClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ArtifactRegistryClient), +) +@mock.patch.object( + ArtifactRegistryAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ArtifactRegistryAsyncClient), +) +def test_artifact_registry_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ArtifactRegistryClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ArtifactRegistryClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ArtifactRegistryClient, + transports.ArtifactRegistryGrpcTransport, + "grpc", + "true", + ), + ( + ArtifactRegistryAsyncClient, + transports.ArtifactRegistryGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ArtifactRegistryClient, + transports.ArtifactRegistryGrpcTransport, + "grpc", + "false", + ), + ( + ArtifactRegistryAsyncClient, + transports.ArtifactRegistryGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + ArtifactRegistryClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ArtifactRegistryClient), +) +@mock.patch.object( + ArtifactRegistryAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ArtifactRegistryAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_artifact_registry_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ArtifactRegistryClient, transports.ArtifactRegistryGrpcTransport, "grpc"), + ( + ArtifactRegistryAsyncClient, + transports.ArtifactRegistryGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_artifact_registry_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ArtifactRegistryClient, transports.ArtifactRegistryGrpcTransport, "grpc"), + ( + ArtifactRegistryAsyncClient, + transports.ArtifactRegistryGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_artifact_registry_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_artifact_registry_client_client_options_from_dict(): + with mock.patch( + "google.cloud.artifactregistry_v1.services.artifact_registry.transports.ArtifactRegistryGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ArtifactRegistryClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_list_docker_images( + transport: str = "grpc", request_type=artifact.ListDockerImagesRequest +): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_docker_images), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = artifact.ListDockerImagesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_docker_images(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == artifact.ListDockerImagesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDockerImagesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_docker_images_from_dict(): + test_list_docker_images(request_type=dict) + + +def test_list_docker_images_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_docker_images), "__call__" + ) as call: + client.list_docker_images() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == artifact.ListDockerImagesRequest() + + +@pytest.mark.asyncio +async def test_list_docker_images_async( + transport: str = "grpc_asyncio", request_type=artifact.ListDockerImagesRequest +): + client = ArtifactRegistryAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_docker_images), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + artifact.ListDockerImagesResponse(next_page_token="next_page_token_value",) + ) + response = await client.list_docker_images(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == artifact.ListDockerImagesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDockerImagesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_docker_images_async_from_dict(): + await test_list_docker_images_async(request_type=dict) + + +def test_list_docker_images_field_headers(): + client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = artifact.ListDockerImagesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_docker_images), "__call__" + ) as call: + call.return_value = artifact.ListDockerImagesResponse() + client.list_docker_images(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_docker_images_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = artifact.ListDockerImagesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_docker_images), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + artifact.ListDockerImagesResponse() + ) + await client.list_docker_images(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_docker_images_flattened(): + client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_docker_images), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = artifact.ListDockerImagesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_docker_images(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +def test_list_docker_images_flattened_error(): + client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_docker_images( + artifact.ListDockerImagesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_docker_images_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_docker_images), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = artifact.ListDockerImagesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + artifact.ListDockerImagesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_docker_images(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_docker_images_flattened_error_async(): + client = ArtifactRegistryAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_docker_images( + artifact.ListDockerImagesRequest(), parent="parent_value", + ) + + +def test_list_docker_images_pager(): + client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_docker_images), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + artifact.ListDockerImagesResponse( + docker_images=[ + artifact.DockerImage(), + artifact.DockerImage(), + artifact.DockerImage(), + ], + next_page_token="abc", + ), + artifact.ListDockerImagesResponse(docker_images=[], next_page_token="def",), + artifact.ListDockerImagesResponse( + docker_images=[artifact.DockerImage(),], next_page_token="ghi", + ), + artifact.ListDockerImagesResponse( + docker_images=[artifact.DockerImage(), artifact.DockerImage(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_docker_images(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, artifact.DockerImage) for i in results) + + +def test_list_docker_images_pages(): + client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_docker_images), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + artifact.ListDockerImagesResponse( + docker_images=[ + artifact.DockerImage(), + artifact.DockerImage(), + artifact.DockerImage(), + ], + next_page_token="abc", + ), + artifact.ListDockerImagesResponse(docker_images=[], next_page_token="def",), + artifact.ListDockerImagesResponse( + docker_images=[artifact.DockerImage(),], next_page_token="ghi", + ), + artifact.ListDockerImagesResponse( + docker_images=[artifact.DockerImage(), artifact.DockerImage(),], + ), + RuntimeError, + ) + pages = list(client.list_docker_images(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_docker_images_async_pager(): + client = ArtifactRegistryAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_docker_images), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+        call.side_effect = (
+            artifact.ListDockerImagesResponse(
+                docker_images=[
+                    artifact.DockerImage(),
+                    artifact.DockerImage(),
+                    artifact.DockerImage(),
+                ],
+                next_page_token="abc",
+            ),
+            artifact.ListDockerImagesResponse(docker_images=[], next_page_token="def",),
+            artifact.ListDockerImagesResponse(
+                docker_images=[artifact.DockerImage(),], next_page_token="ghi",
+            ),
+            artifact.ListDockerImagesResponse(
+                docker_images=[artifact.DockerImage(), artifact.DockerImage(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_docker_images(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, artifact.DockerImage) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_docker_images_async_pages():
+    # NOTE: credentials must be an *instance*; passing the class object was a
+    # generator bug, fixed here with explicit instantiation.
+    client = ArtifactRegistryAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_docker_images),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + artifact.ListDockerImagesResponse( + docker_images=[ + artifact.DockerImage(), + artifact.DockerImage(), + artifact.DockerImage(), + ], + next_page_token="abc", + ), + artifact.ListDockerImagesResponse(docker_images=[], next_page_token="def",), + artifact.ListDockerImagesResponse( + docker_images=[artifact.DockerImage(),], next_page_token="ghi", + ), + artifact.ListDockerImagesResponse( + docker_images=[artifact.DockerImage(), artifact.DockerImage(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_docker_images(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_repositories( + transport: str = "grpc", request_type=repository.ListRepositoriesRequest +): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = repository.ListRepositoriesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repository.ListRepositoriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRepositoriesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_repositories_from_dict(): + test_list_repositories(request_type=dict) + + +def test_list_repositories_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), "__call__" + ) as call: + client.list_repositories() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repository.ListRepositoriesRequest() + + +@pytest.mark.asyncio +async def test_list_repositories_async( + transport: str = "grpc_asyncio", request_type=repository.ListRepositoriesRequest +): + client = ArtifactRegistryAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + repository.ListRepositoriesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repository.ListRepositoriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRepositoriesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_repositories_async_from_dict(): + await test_list_repositories_async(request_type=dict) + + +def test_list_repositories_field_headers(): + client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repository.ListRepositoriesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), "__call__" + ) as call: + call.return_value = repository.ListRepositoriesResponse() + client.list_repositories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_repositories_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repository.ListRepositoriesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + repository.ListRepositoriesResponse() + ) + await client.list_repositories(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_repositories_flattened(): + client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = repository.ListRepositoriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_repositories(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +def test_list_repositories_flattened_error(): + client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_repositories( + repository.ListRepositoriesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_repositories_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_repositories), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+        # Designate the async return value directly; the earlier plain
+        # `call.return_value = ListRepositoriesResponse()` assignment was dead
+        # code (immediately overwritten) and has been removed.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            repository.ListRepositoriesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_repositories(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_repositories_flattened_error_async():
+    client = ArtifactRegistryAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_repositories(
+            repository.ListRepositoriesRequest(), parent="parent_value",
+        )
+
+
+def test_list_repositories_pager():
+    # NOTE: credentials must be an *instance*; passing the class object was a
+    # generator bug, fixed here with explicit instantiation.
+    client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_repositories), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            repository.ListRepositoriesResponse(
+                repositories=[
+                    repository.Repository(),
+                    repository.Repository(),
+                    repository.Repository(),
+                ],
+                next_page_token="abc",
+            ),
+            repository.ListRepositoriesResponse(
+                repositories=[], next_page_token="def",
+            ),
+            repository.ListRepositoriesResponse(
+                repositories=[repository.Repository(),], next_page_token="ghi",
+            ),
+            repository.ListRepositoriesResponse(
+                repositories=[repository.Repository(), repository.Repository(),],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_repositories(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, repository.Repository) for i in results)
+
+
+def test_list_repositories_pages():
+    # NOTE: credentials must be an *instance*; passing the class object was a
+    # generator bug, fixed here with explicit instantiation.
+    client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_repositories), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            repository.ListRepositoriesResponse(
+                repositories=[
+                    repository.Repository(),
+                    repository.Repository(),
+                    repository.Repository(),
+                ],
+                next_page_token="abc",
+            ),
+            repository.ListRepositoriesResponse(
+                repositories=[], next_page_token="def",
+            ),
+            repository.ListRepositoriesResponse(
+                repositories=[repository.Repository(),], next_page_token="ghi",
+            ),
+            repository.ListRepositoriesResponse(
+                repositories=[repository.Repository(), repository.Repository(),],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_repositories(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_repositories_async_pager():
+    # NOTE: credentials must be an *instance*; passing the class object was a
+    # generator bug, fixed here with explicit instantiation.
+    client = ArtifactRegistryAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_repositories),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            repository.ListRepositoriesResponse(
+                repositories=[
+                    repository.Repository(),
+                    repository.Repository(),
+                    repository.Repository(),
+                ],
+                next_page_token="abc",
+            ),
+            repository.ListRepositoriesResponse(
+                repositories=[], next_page_token="def",
+            ),
+            repository.ListRepositoriesResponse(
+                repositories=[repository.Repository(),], next_page_token="ghi",
+            ),
+            repository.ListRepositoriesResponse(
+                repositories=[repository.Repository(), repository.Repository(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_repositories(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, repository.Repository) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_repositories_async_pages():
+    # NOTE: credentials must be an *instance*; passing the class object was a
+    # generator bug, fixed here with explicit instantiation.
+    client = ArtifactRegistryAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_repositories),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + repository.ListRepositoriesResponse( + repositories=[ + repository.Repository(), + repository.Repository(), + repository.Repository(), + ], + next_page_token="abc", + ), + repository.ListRepositoriesResponse( + repositories=[], next_page_token="def", + ), + repository.ListRepositoriesResponse( + repositories=[repository.Repository(),], next_page_token="ghi", + ), + repository.ListRepositoriesResponse( + repositories=[repository.Repository(), repository.Repository(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_repositories(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_repository( + transport: str = "grpc", request_type=repository.GetRepositoryRequest +): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_repository), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = repository.Repository( + name="name_value", + format_=repository.Repository.Format.DOCKER, + description="description_value", + kms_key_name="kms_key_name_value", + ) + response = client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == repository.GetRepositoryRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, repository.Repository) + assert response.name == "name_value" + assert response.format_ == repository.Repository.Format.DOCKER + assert response.description == "description_value" + assert response.kms_key_name == "kms_key_name_value" + + +def test_get_repository_from_dict(): + test_get_repository(request_type=dict) + + +def test_get_repository_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_repository), "__call__") as call: + client.get_repository() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == repository.GetRepositoryRequest() + + +@pytest.mark.asyncio +async def test_get_repository_async( + transport: str = "grpc_asyncio", request_type=repository.GetRepositoryRequest +): + client = ArtifactRegistryAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_repository), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + repository.Repository( + name="name_value", + format_=repository.Repository.Format.DOCKER, + description="description_value", + kms_key_name="kms_key_name_value", + ) + ) + response = await client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == repository.GetRepositoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, repository.Repository) + assert response.name == "name_value" + assert response.format_ == repository.Repository.Format.DOCKER + assert response.description == "description_value" + assert response.kms_key_name == "kms_key_name_value" + + +@pytest.mark.asyncio +async def test_get_repository_async_from_dict(): + await test_get_repository_async(request_type=dict) + + +def test_get_repository_field_headers(): + client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repository.GetRepositoryRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_repository), "__call__") as call: + call.return_value = repository.Repository() + client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_repository_field_headers_async(): + client = ArtifactRegistryAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = repository.GetRepositoryRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_repository), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + repository.Repository() + ) + await client.get_repository(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_repository_flattened(): + client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_repository), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = repository.Repository() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_repository(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_get_repository_flattened_error(): + client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_repository( + repository.GetRepositoryRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_repository_flattened_async(): + client = ArtifactRegistryAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_repository), "__call__") as call:
+        # Designate the async return value directly; the earlier plain
+        # `call.return_value = repository.Repository()` assignment was dead
+        # code (immediately overwritten) and has been removed.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            repository.Repository()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_repository(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_repository_flattened_error_async():
+    client = ArtifactRegistryAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_repository(
+            repository.GetRepositoryRequest(), name="name_value",
+        )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.ArtifactRegistryGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = ArtifactRegistryClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.ArtifactRegistryGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = ArtifactRegistryClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide scopes and a transport instance.
+ transport = transports.ArtifactRegistryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ArtifactRegistryClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ArtifactRegistryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ArtifactRegistryClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ArtifactRegistryGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ArtifactRegistryGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ArtifactRegistryGrpcTransport, + transports.ArtifactRegistryGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = ArtifactRegistryClient(credentials=ga_credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.ArtifactRegistryGrpcTransport,) + + +def test_artifact_registry_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ArtifactRegistryTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_artifact_registry_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.artifactregistry_v1.services.artifact_registry.transports.ArtifactRegistryTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ArtifactRegistryTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_docker_images", + "list_repositories", + "get_repository", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_artifact_registry_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.artifactregistry_v1.services.artifact_registry.transports.ArtifactRegistryTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ArtifactRegistryTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + 
"https://www.googleapis.com/auth/cloud-platform.read-only", + ), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_artifact_registry_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.artifactregistry_v1.services.artifact_registry.transports.ArtifactRegistryTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ArtifactRegistryTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + ), + quota_project_id="octopus", + ) + + +def test_artifact_registry_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.artifactregistry_v1.services.artifact_registry.transports.ArtifactRegistryTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ArtifactRegistryTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_artifact_registry_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ArtifactRegistryClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_artifact_registry_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ArtifactRegistryClient() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ArtifactRegistryGrpcTransport, + transports.ArtifactRegistryGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_artifact_registry_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ArtifactRegistryGrpcTransport, + transports.ArtifactRegistryGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_artifact_registry_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ArtifactRegistryGrpcTransport, grpc_helpers), + (transports.ArtifactRegistryGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_artifact_registry_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "artifactregistry.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + ), + scopes=["1", "2"], + default_host="artifactregistry.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ArtifactRegistryGrpcTransport, + transports.ArtifactRegistryGrpcAsyncIOTransport, + ], +) +def test_artifact_registry_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_artifact_registry_host_no_port(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="artifactregistry.googleapis.com" + ), + ) + assert client.transport._host == "artifactregistry.googleapis.com:443" + + +def test_artifact_registry_host_with_port(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="artifactregistry.googleapis.com:8000" + ), + ) + assert client.transport._host == "artifactregistry.googleapis.com:8000" + + +def test_artifact_registry_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ArtifactRegistryGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_artifact_registry_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.ArtifactRegistryGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ArtifactRegistryGrpcTransport, + transports.ArtifactRegistryGrpcAsyncIOTransport, + ], +) +def test_artifact_registry_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, 
client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ArtifactRegistryGrpcTransport, + transports.ArtifactRegistryGrpcAsyncIOTransport, + ], +) +def test_artifact_registry_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_docker_image_path(): + project = "squid" + location = "clam" + repository = "whelk" + docker_image = "octopus" + expected = "projects/{project}/locations/{location}/repositories/{repository}/dockerImages/{docker_image}".format( + project=project, + location=location, + repository=repository, + docker_image=docker_image, + ) + actual = ArtifactRegistryClient.docker_image_path( + project, location, repository, docker_image + ) + assert expected == actual + + +def test_parse_docker_image_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "repository": "cuttlefish", + "docker_image": "mussel", + } + path = ArtifactRegistryClient.docker_image_path(**expected) 
+ + # Check that the path construction is reversible. + actual = ArtifactRegistryClient.parse_docker_image_path(path) + assert expected == actual + + +def test_repository_path(): + project = "winkle" + location = "nautilus" + repository = "scallop" + expected = "projects/{project}/locations/{location}/repositories/{repository}".format( + project=project, location=location, repository=repository, + ) + actual = ArtifactRegistryClient.repository_path(project, location, repository) + assert expected == actual + + +def test_parse_repository_path(): + expected = { + "project": "abalone", + "location": "squid", + "repository": "clam", + } + path = ArtifactRegistryClient.repository_path(**expected) + + # Check that the path construction is reversible. + actual = ArtifactRegistryClient.parse_repository_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ArtifactRegistryClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ArtifactRegistryClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ArtifactRegistryClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder,) + actual = ArtifactRegistryClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ArtifactRegistryClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ArtifactRegistryClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization,) + actual = ArtifactRegistryClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ArtifactRegistryClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ArtifactRegistryClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project,) + actual = ArtifactRegistryClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ArtifactRegistryClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ArtifactRegistryClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = ArtifactRegistryClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ArtifactRegistryClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ArtifactRegistryClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ArtifactRegistryTransport, "_prep_wrapped_messages" + ) as prep: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ArtifactRegistryTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ArtifactRegistryClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info)