diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 8df56656..a194a9ea 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation
 python3.6 -m pip install --upgrade --quiet nox
 python3.6 -m nox --version
 
-python3.6 -m nox
+# If NOX_SESSION is set, run only the specified session;
+# otherwise run all the sessions.
+if [[ -n "${NOX_SESSION:-}" ]]; then
+    python3.6 -m nox -s "${NOX_SESSION:-}"
+else
+    python3.6 -m nox
+fi
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
new file mode 100644
index 00000000..412b0b56
--- /dev/null
+++ b/.kokoro/docker/docs/Dockerfile
@@ -0,0 +1,98 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ubuntu:20.04
+
+ENV DEBIAN_FRONTEND noninteractive
+
+# Ensure local Python is preferred over distribution Python.
+ENV PATH /usr/local/bin:$PATH
+
+# Install dependencies.
+RUN apt-get update \
+  && apt-get install -y --no-install-recommends \
+    apt-transport-https \
+    build-essential \
+    ca-certificates \
+    curl \
+    dirmngr \
+    git \
+    gpg-agent \
+    graphviz \
+    libbz2-dev \
+    libdb5.3-dev \
+    libexpat1-dev \
+    libffi-dev \
+    liblzma-dev \
+    libreadline-dev \
+    libsnappy-dev \
+    libssl-dev \
+    libsqlite3-dev \
+    portaudio19-dev \
+    redis-server \
+    software-properties-common \
+    ssh \
+    sudo \
+    tcl \
+    tcl-dev \
+    tk \
+    tk-dev \
+    uuid-dev \
+    wget \
+    zlib1g-dev \
+  && add-apt-repository universe \
+  && apt-get update \
+  && apt-get -y install jq \
+  && apt-get clean autoclean \
+  && apt-get autoremove -y \
+  && rm -rf /var/lib/apt/lists/* \
+  && rm -f /var/cache/apt/archives/*.deb
+
+
+COPY fetch_gpg_keys.sh /tmp
+# Install the desired versions of Python.
+RUN set -ex \
+  && export GNUPGHOME="$(mktemp -d)" \
+  && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \
+  && /tmp/fetch_gpg_keys.sh \
+  && for PYTHON_VERSION in 3.7.8 3.8.5; do \
+    wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \
+    && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \
+    && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \
+    && rm -r python-${PYTHON_VERSION}.tar.xz.asc \
+    && mkdir -p /usr/src/python-${PYTHON_VERSION} \
+    && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \
+    && rm python-${PYTHON_VERSION}.tar.xz \
+    && cd /usr/src/python-${PYTHON_VERSION} \
+    && ./configure \
+      --enable-shared \
+    # This works only on Python 2.7 and throws a warning on every other
+    # version, but seems otherwise harmless.
+      --enable-unicode=ucs4 \
+      --with-system-ffi \
+      --without-ensurepip \
+    && make -j$(nproc) \
+    && make install \
+    && ldconfig \
+  ; done \
+  && rm -rf "${GNUPGHOME}" \
+  && rm -rf /usr/src/python* \
+  && rm -rf ~/.cache/
+
+RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
+  && python3.7 /tmp/get-pip.py \
+  && python3.8 /tmp/get-pip.py \
+  && rm /tmp/get-pip.py
+
+CMD ["python3.7"]
diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh
new file mode 100755
index 00000000..d653dd86
--- /dev/null
+++ b/.kokoro/docker/docs/fetch_gpg_keys.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A script to fetch gpg keys with retry.
+# Avoid jinja parsing the file.
+#
+
+function retry {
+    if [[ "${#}" -le 1 ]]; then
+        echo "Usage: ${0} retry_count commands..."
+        exit 1
+    fi
+    local retries=${1}
+    local command="${@:2}"
+    until [[ "${retries}" -le 0 ]]; do
+        $command && return 0
+        if [[ $? -ne 0 ]]; then
+            echo "command failed, retrying"
+            ((retries--))
+        fi
+    done
+    return 1
+}
+
+# 3.6.9, 3.7.5 (Ned Deily)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+    0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D
+
+# 3.8.0 (Ɓukasz Langa)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+    E3FF2839C048B25C084DEBE9B26995E310250568
+
+#
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index 01a16ec8..4206e8ac 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -11,12 +11,12 @@ action {
 gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
 
 # Use the trampoline script to run in docker.
-build_file: "python-logging/.kokoro/trampoline.sh"
+build_file: "python-logging/.kokoro/trampoline_v2.sh"
 
 # Configure the docker image for kokoro-trampoline.
 env_vars: {
     key: "TRAMPOLINE_IMAGE"
-    value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
 }
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
@@ -28,6 +28,23 @@ env_vars: {
     value: "docs-staging"
 }
 
+env_vars: {
+    key: "V2_STAGING_BUCKET"
+    value: "docs-staging-v2-staging"
+}
+
+# Upload the docker image after successful builds.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE_UPLOAD"
+    value: "true"
+}
+
+# Always rebuild the docker image from this Dockerfile.
+env_vars: {
+    key: "TRAMPOLINE_DOCKERFILE"
+    value: ".kokoro/docker/docs/Dockerfile"
+}
+
 # Fetch the token needed for reporting release status to GitHub
 before_action {
   fetch_keystore {
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
new file mode 100644
index 00000000..11181078
--- /dev/null
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -0,0 +1,18 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "STAGING_BUCKET"
+    value: "gcloud-python-test"
+}
+
+env_vars: {
+    key: "V2_STAGING_BUCKET"
+    value: "gcloud-python-test"
+}
+
+# We only upload the image in the main `docs` build.
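+# Presubmit runs may still build the image locally, but they never push it.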
+env_vars: {
+    key: "TRAMPOLINE_IMAGE_UPLOAD"
+    value: "false"
+}
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
index d01483d2..8acb14e8 100755
--- a/.kokoro/publish-docs.sh
+++ b/.kokoro/publish-docs.sh
@@ -13,33 +13,21 @@ # See the License for the specific language governing permissions and
 # limitations under the License.
 
-#!/bin/bash
-
 set -eo pipefail
 
 # Disable buffering, so that the logs stream through.
 export PYTHONUNBUFFERED=1
 
-cd github/python-logging
-
-# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+export PATH="${HOME}/.local/bin:${PATH}"
 
 # Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --user --upgrade --quiet nox
+python3 -m nox --version
 
 # build docs
 nox -s docs
 
-python3 -m pip install gcp-docuploader
-
-# install a json parser
-sudo apt-get update
-sudo apt-get -y install software-properties-common
-sudo add-apt-repository universe
-sudo apt-get update
-sudo apt-get -y install jq
+python3 -m pip install --user gcp-docuploader
 
 # create metadata
 python3 -m docuploader create-metadata \
@@ -54,4 +42,23 @@ python3 -m docuploader create-metadata \
 cat docs.metadata
 
 # upload docs
-python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging
+python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
+
+
+# docfx yaml files
+nox -s docfx
+
+# create metadata.
+python3 -m docuploader create-metadata \
+  --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+  --version=$(python3 setup.py --version) \
+  --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+  --distribution-name=$(python3 setup.py --name) \
+  --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+  --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+  --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index c9b0928b..2ef944a0 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and
 # limitations under the License.
 
-#!/bin/bash
-
 set -eo pipefail
 
 # Start the releasetool reporter
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
new file mode 100644
index 00000000..ceb20370
--- /dev/null
+++ b/.kokoro/samples/lint/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+    define_artifacts {
+        regex: "**/*sponge_log.xml"
+    }
+}
+
+# Specify which tests to run
+env_vars: {
+    key: "RUN_TESTS_SESSION"
+    value: "lint"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-logging/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/lint/continuous.cfg b/.kokoro/samples/lint/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/lint/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/periodic.cfg b/.kokoro/samples/lint/periodic.cfg
new file mode 100644
index 00000000..50fec964
--- /dev/null
+++ b/.kokoro/samples/lint/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/presubmit.cfg b/.kokoro/samples/lint/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/lint/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
new file mode 100644
index 00000000..a9d6d48c
--- /dev/null
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+    define_artifacts {
+        regex: "**/*sponge_log.xml"
+    }
+}
+
+# Specify which tests to run
+env_vars: {
+    key: "RUN_TESTS_SESSION"
+    value: "py-3.6"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-logging/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg
new file mode 100644
index 00000000..7218af14
--- /dev/null
+++ b/.kokoro/samples/python3.6/continuous.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
+
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg
new file mode 100644
index 00000000..50fec964
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.6/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.6/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
new file mode 100644
index 00000000..1f7cc197
--- /dev/null
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+    define_artifacts {
+        regex: "**/*sponge_log.xml"
+    }
+}
+
+# Specify which tests to run
+env_vars: {
+    key: "RUN_TESTS_SESSION"
+    value: "py-3.7"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-logging/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/continuous.cfg b/.kokoro/samples/python3.7/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.7/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
new file mode 100644
index 00000000..50fec964
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/presubmit.cfg b/.kokoro/samples/python3.7/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.7/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
new file mode 100644
index 00000000..9ba81c4b
--- /dev/null
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+    define_artifacts {
+        regex: "**/*sponge_log.xml"
+    }
+}
+
+# Specify which tests to run
+env_vars: {
+    key: "RUN_TESTS_SESSION"
+    value: "py-3.8"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-logging/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/continuous.cfg b/.kokoro/samples/python3.8/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.8/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
new file mode 100644
index 00000000..50fec964
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/presubmit.cfg b/.kokoro/samples/python3.8/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.8/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
new file mode 100755
index 00000000..ba97b53d
--- /dev/null
+++ b/.kokoro/test-samples.sh
@@ -0,0 +1,111 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-logging
+
+# Run periodic samples tests at latest release
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+    LATEST_RELEASE=$(git describe --abbrev=0 --tags)
+    git checkout $LATEST_RELEASE
+fi
+
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+    echo "No tests run. \`./samples\` not found"
+    exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+    gcloud auth activate-service-account \
+        --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+        --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
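+# This lets `gcloud` commands inside the samples run as the test service account.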
+gcloud auth activate-service-account \
+    --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+    cd "$ROOT"
+    # Navigate to the project folder.
+    file=$(dirname "$file")
+    cd "$file"
+
+    echo "------------------------------------------------------------"
+    echo "- testing $file"
+    echo "------------------------------------------------------------"
+
+    # Use nox to execute the tests for the project.
+    python3.6 -m nox -s "$RUN_TESTS_SESSION"
+    EXIT=$?
+
+    # If this is a periodic build, send the test log to the Build Cop Bot.
+    # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
+    if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+      chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
+      $KOKORO_GFILE_DIR/linux_amd64/buildcop
+    fi
+
+    if [[ $EXIT -ne 0 ]]; then
+      RTN=1
+      echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+    else
+      echo -e "\n Testing completed.\n"
+    fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
new file mode 100755
index 00000000..719bcd5b
--- /dev/null
+++ b/.kokoro/trampoline_v2.sh
@@ -0,0 +1,494 @@
+#!/usr/bin/env bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things:
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker container with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download a few files from GCS to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
+#
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
+#
+# Then run the script.
+# .kokoro/trampoline_v2.sh
+#
+# These environment variables are required:
+# TRAMPOLINE_IMAGE: The docker image to use.
+# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile.
+#
+# You can optionally change these environment variables:
+# TRAMPOLINE_IMAGE_UPLOAD:
+#     (true|false): Whether to upload the Docker image after
+#     successful builds.
+# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
+# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
+#     Defaults to /workspace.
+# There may also be repo-specific envvars in .trampolinerc in
+# the project root.
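+#
+# Example local invocation (values are illustrative; any image and build
+# file this repo defines will do):
+#   TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/python-lib-docs \
+#     TRAMPOLINE_BUILD_FILE=.kokoro/build.sh .kokoro/trampoline_v2.sh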
+
+
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+    readonly IO_COLOR_RED="$(tput setaf 1)"
+    readonly IO_COLOR_GREEN="$(tput setaf 2)"
+    readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+    readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+    readonly IO_COLOR_RED=""
+    readonly IO_COLOR_GREEN=""
+    readonly IO_COLOR_YELLOW=""
+    readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+    [ $(LC_ALL=C type -t $1)"" == "function" ]
+}
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+    local color="$1"
+    shift
+    local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+    echo "================================================================"
+    echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+    echo "================================================================"
+}
+
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+    log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+    log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+    log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+    log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+    rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+    # TRAMPOLINE_V2 variables.
+    # Tells scripts whether they are running as part of CI or not.
+    "RUNNING_IN_CI"
+    # Indicates which CI system we're in.
+    "TRAMPOLINE_CI"
+    # Indicates the version of the script.
+    "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI system we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passed down to the
+# container to tell scripts which CI system we're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+    # descriptive env var for indicating it's on CI.
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="kokoro"
+    if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+        if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+            log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+            exit 1
+        fi
+        # This service account will be activated later.
+        TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+    else
+        if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+            gcloud auth list
+        fi
+        log_yellow "Configuring Container Registry access"
+        gcloud auth configure-docker --quiet
+    fi
+    pass_down_envvars+=(
+        # KOKORO dynamic variables.
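+        # These are set by the Kokoro runner and change on every build.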
+        "KOKORO_BUILD_NUMBER"
+        "KOKORO_BUILD_ID"
+        "KOKORO_JOB_NAME"
+        "KOKORO_GIT_COMMIT"
+        "KOKORO_GITHUB_COMMIT"
+        "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
+        "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
+        # For Build Cop Bot
+        "KOKORO_GITHUB_COMMIT_URL"
+        "KOKORO_GITHUB_PULL_REQUEST_URL"
+    )
+elif [[ "${TRAVIS:-}" == "true" ]]; then
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="travis"
+    pass_down_envvars+=(
+        "TRAVIS_BRANCH"
+        "TRAVIS_BUILD_ID"
+        "TRAVIS_BUILD_NUMBER"
+        "TRAVIS_BUILD_WEB_URL"
+        "TRAVIS_COMMIT"
+        "TRAVIS_COMMIT_MESSAGE"
+        "TRAVIS_COMMIT_RANGE"
+        "TRAVIS_JOB_NAME"
+        "TRAVIS_JOB_NUMBER"
+        "TRAVIS_JOB_WEB_URL"
+        "TRAVIS_PULL_REQUEST"
+        "TRAVIS_PULL_REQUEST_BRANCH"
+        "TRAVIS_PULL_REQUEST_SHA"
+        "TRAVIS_PULL_REQUEST_SLUG"
+        "TRAVIS_REPO_SLUG"
+        "TRAVIS_SECURE_ENV_VARS"
+        "TRAVIS_TAG"
+    )
+elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="github-workflow"
+    pass_down_envvars+=(
+        "GITHUB_WORKFLOW"
+        "GITHUB_RUN_ID"
+        "GITHUB_RUN_NUMBER"
+        "GITHUB_ACTION"
+        "GITHUB_ACTIONS"
+        "GITHUB_ACTOR"
+        "GITHUB_REPOSITORY"
+        "GITHUB_EVENT_NAME"
+        "GITHUB_EVENT_PATH"
+        "GITHUB_SHA"
+        "GITHUB_REF"
+        "GITHUB_HEAD_REF"
+        "GITHUB_BASE_REF"
+    )
+elif [[ "${CIRCLECI:-}" == "true" ]]; then
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="circleci"
+    pass_down_envvars+=(
+        "CIRCLE_BRANCH"
+        "CIRCLE_BUILD_NUM"
+        "CIRCLE_BUILD_URL"
+        "CIRCLE_COMPARE_URL"
+        "CIRCLE_JOB"
+        "CIRCLE_NODE_INDEX"
+        "CIRCLE_NODE_TOTAL"
+        "CIRCLE_PREVIOUS_BUILD_NUM"
+        "CIRCLE_PROJECT_REPONAME"
+        "CIRCLE_PROJECT_USERNAME"
+        "CIRCLE_REPOSITORY_URL"
+        "CIRCLE_SHA1"
+        "CIRCLE_STAGE"
+        "CIRCLE_USERNAME"
+        "CIRCLE_WORKFLOW_ID"
+        "CIRCLE_WORKFLOW_JOB_ID"
+        "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
+        "CIRCLE_WORKFLOW_WORKSPACE_ID"
+    )
+fi
+
+# Configure the service account for pulling the docker image.
+function repo_root() {
+    local dir="$1"
+    while [[ ! -d "${dir}/.git" ]]; do
+        dir="$(dirname "$dir")"
+    done
+    echo "${dir}"
+}
+
+# Detect the project root. In CI builds, we assume the script is in
+# the git tree and traverse from there, otherwise, traverse from `pwd`
+# to find `.git` directory.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+    PROGRAM_PATH="$(realpath "$0")"
+    PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")"
+    PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")"
+else
+    PROJECT_ROOT="$(repo_root $(pwd))"
+fi
+
+log_yellow "Changing to the project root: ${PROJECT_ROOT}."
+cd "${PROJECT_ROOT}"
+
+# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need
+# to use this environment variable in `PROJECT_ROOT`.
+if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then
+
+    mkdir -p "${tmpdir}/gcloud"
+    gcloud_config_dir="${tmpdir}/gcloud"
+
+    log_yellow "Using isolated gcloud config: ${gcloud_config_dir}."
+    export CLOUDSDK_CONFIG="${gcloud_config_dir}"
+
+    log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication."
+    gcloud auth activate-service-account \
+        --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}"
+    log_yellow "Configuring Container Registry access"
+    gcloud auth configure-docker --quiet
+fi
+
+required_envvars=(
+    # The basic trampoline configurations.
+    "TRAMPOLINE_IMAGE"
+    "TRAMPOLINE_BUILD_FILE"
+)
+
+if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then
+    source "${PROJECT_ROOT}/.trampolinerc"
+fi
+
+log_yellow "Checking environment variables."
+for e in "${required_envvars[@]}"
+do
+    if [[ -z "${!e:-}" ]]; then
+        log "Missing ${e} env var. Aborting."
+        exit 1
+    fi
+done
+
+# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1
+# script: e.g. "github/repo-name/.kokoro/run_tests.sh"
+TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}"
+log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}"
+
+# Ignore errors on docker operations and test execution
+set +e
+
+log_yellow "Preparing Docker image."
+# We only download the docker image in CI builds.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+    # Download the docker image specified by `TRAMPOLINE_IMAGE`
+
+    # We may want to add --max-concurrent-downloads flag.
+
+    log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+    if docker pull "${TRAMPOLINE_IMAGE}"; then
+        log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+        has_image="true"
+    else
+        log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+        has_image="false"
+    fi
+else
+    # For local run, check if we have the image.
+    if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then
+        has_image="true"
+    else
+        has_image="false"
+    fi
+fi
+
+
+# The default user for a Docker container has uid 0 (root). To avoid
+# creating root-owned files in the build directory we tell docker to
+# use the current user ID.
+user_uid="$(id -u)"
+user_gid="$(id -g)"
+user_name="$(id -un)"
+
+# To allow docker in docker, we add the user to the docker group in
+# the host os.
+docker_gid=$(cut -d: -f3 < <(getent group docker))
+
+update_cache="false"
+if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then
+    # Build the Docker image from the source.
+    context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}")
+    docker_build_flags=(
+        "-f" "${TRAMPOLINE_DOCKERFILE}"
+        "-t" "${TRAMPOLINE_IMAGE}"
+        "--build-arg" "UID=${user_uid}"
+        "--build-arg" "USERNAME=${user_name}"
+    )
+    if [[ "${has_image}" == "true" ]]; then
+        docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}")
+    fi
+
+    log_yellow "Start building the docker image."
+    if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then
+        echo "docker build" "${docker_build_flags[@]}" "${context_dir}"
+    fi
+
+    # On CI systems, we want to suppress docker build logs, and only
+    # output the logs when the build fails.
+    if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+        if docker build "${docker_build_flags[@]}" "${context_dir}" \
+            > "${tmpdir}/docker_build.log" 2>&1; then
+            if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+                cat "${tmpdir}/docker_build.log"
+            fi
+
+            log_green "Finished building the docker image."
+            update_cache="true"
+        else
+            log_red "Failed to build the Docker image, aborting."
+            log_yellow "Dumping the build logs:"
+            cat "${tmpdir}/docker_build.log"
+            exit 1
+        fi
+    else
+        if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+            log_green "Finished building the docker image."
+            update_cache="true"
+        else
+            log_red "Failed to build the Docker image, aborting."
+            exit 1
+        fi
+    fi
+else
+    if [[ "${has_image}" != "true" ]]; then
+        log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+        exit 1
+    fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+    # Remove the container after it exits.
+    "--rm"
+
+    # Use the host network.
+    "--network=host"
+
+    # Run in privileged mode. We are not using docker for sandboxing or
+    # isolation, just for packaging our dev tools.
+    "--privileged"
+
+    # Run the docker script with the user id. Because the docker image gets to
+    # write in ${PWD} you typically want this to be your user id.
+    # To allow docker in docker, we need to use docker gid on the host.
+    "--user" "${user_uid}:${docker_gid}"
+
+    # Pass down the USER.
+    "--env" "USER=${user_name}"
+
+    # Mount the project directory inside the Docker container.
+    "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+    "--workdir" "${TRAMPOLINE_WORKSPACE}"
+    "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+    # Mount the temporary home directory.
+    "--volume" "${tmphome}:/h"
+    "--env" "HOME=/h"
+
+    # Allow docker in docker.
+    "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+    # Mount the /tmp so that docker in docker can mount the files
+    # there correctly.
+    "--volume" "/tmp:/tmp"
+    # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+    # TODO(tmatsuo): This part is not portable.
+    "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+    "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+    "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+    "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+    "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+    docker_flags+=("-it")
+fi
+
+# Passing down env vars
+for e in "${pass_down_envvars[@]}"
+do
+    if [[ -n "${!e:-}" ]]; then
+        docker_flags+=("--env" "${e}=${!e}")
+    fi
+done
+
+# If arguments are given, all arguments will become the commands run
+# in the container, otherwise run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+    log_yellow "Running the given commands '" "${@:1}" "' in the container."
+    readonly commands=("${@:1}")
+    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+        echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+    fi
+    docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+    log_yellow "Running the tests in a Docker container."
+    docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+        echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+    fi
+    docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+    log_green "Build finished with ${test_retval}"
+else
+    log_red "Build finished with ${test_retval}"
+fi
+
+# Only upload it when the test passes.
+if [[ "${update_cache}" == "true" ]] && \
+       [[ $test_retval == 0 ]] && \
+       [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+    log_yellow "Uploading the Docker image."
+    if docker push "${TRAMPOLINE_IMAGE}"; then
+        log_green "Finished uploading the Docker image."
+    else
+        log_red "Failed uploading the Docker image."
+    fi
+    # Call trampoline_after_upload_hook if it's defined.
+    if function_exists trampoline_after_upload_hook; then
+        trampoline_after_upload_hook
+    fi
+
+fi
+
+exit "${test_retval}"
diff --git a/.trampolinerc b/.trampolinerc
new file mode 100644
index 00000000..995ee291
--- /dev/null
+++ b/.trampolinerc
@@ -0,0 +1,52 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Template for .trampolinerc
+
+# Add required env vars here.
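+# trampoline_v2.sh checks these and aborts early if any are unset.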
+required_envvars+=(
+    "STAGING_BUCKET"
+    "V2_STAGING_BUCKET"
+)
+
+# Add env vars which are passed down into the container here.
+pass_down_envvars+=(
+    "STAGING_BUCKET"
+    "V2_STAGING_BUCKET"
+)
+
+# Prevent unintentional override on the default image.
+if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
+   [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+    echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
+    exit 1
+fi
+
+# Define the default value if it makes sense.
+if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
+    TRAMPOLINE_IMAGE_UPLOAD=""
+fi
+
+if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+    TRAMPOLINE_IMAGE=""
+fi
+
+if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
+    TRAMPOLINE_DOCKERFILE=""
+fi
+
+if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
+    TRAMPOLINE_BUILD_FILE=""
+fi
diff --git a/docs/conf.py b/docs/conf.py
index 45db4f8b..7a03936b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -340,7 +340,10 @@ intersphinx_mapping = {
     "python": ("http://python.readthedocs.org/en/latest/", None),
     "google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
-    "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
+    "google.api_core": (
+        "https://googleapis.dev/python/google-api-core/latest/",
+        None,
+    ),
     "grpc": ("https://grpc.io/grpc/python/", None),
 }
diff --git a/google/cloud/logging/entries.py b/google/cloud/logging/entries.py
index ed1c2816..3847102d 100644
--- a/google/cloud/logging/entries.py
+++ b/google/cloud/logging/entries.py
@@ -232,8 +232,7 @@ def from_api_repr(cls, resource, client, loggers=None):
         return inst
 
     def to_api_repr(self):
-        """API repr (JSON format) for entry.
-        """
+        """API repr (JSON format) for entry."""
         info = {}
         if self.log_name is not None:
             info["logName"] = self.log_name
@@ -285,8 +284,7 @@ def _extract_payload(cls, resource):
         return resource["textPayload"]
 
     def to_api_repr(self):
-        """API repr (JSON format) for entry.
-        """
+        """API repr (JSON format) for entry."""
         info = super(TextEntry, self).to_api_repr()
         info["textPayload"] = self.payload
         return info
@@ -313,8 +311,7 @@ def _extract_payload(cls, resource):
         return resource["jsonPayload"]
 
     def to_api_repr(self):
-        """API repr (JSON format) for entry.
-        """
+        """API repr (JSON format) for entry."""
         info = super(StructEntry, self).to_api_repr()
         info["jsonPayload"] = self.payload
         return info
@@ -351,8 +348,7 @@ def payload_json(self):
         return self.payload
 
     def to_api_repr(self):
-        """API repr (JSON format) for entry.
-        """
+        """API repr (JSON format) for entry."""
         info = super(ProtobufEntry, self).to_api_repr()
         info["protoPayload"] = MessageToDict(self.payload)
         return info
diff --git a/google/cloud/logging/handlers/_helpers.py b/google/cloud/logging/handlers/_helpers.py
index d65a2690..b4b7fcf5 100644
--- a/google/cloud/logging/handlers/_helpers.py
+++ b/google/cloud/logging/handlers/_helpers.py
@@ -41,8 +41,8 @@ def format_stackdriver_json(record, message):
     """Helper to format a LogRecord in in Stackdriver fluentd format.
 
-        :rtype: str
-        :returns: JSON str to be written to the log file.
+    :rtype: str
+    :returns: JSON str to be written to the log file.
     """
+
     subsecond, second = math.modf(record.created)
diff --git a/google/cloud/logging/logger.py b/google/cloud/logging/logger.py
index b212b6e8..6b5445d0 100644
--- a/google/cloud/logging/logger.py
+++ b/google/cloud/logging/logger.py
@@ -114,8 +114,7 @@ def batch(self, client=None):
         return Batch(self, client)
 
     def _do_log(self, client, _entry_class, payload=None, **kw):
-        """Helper for :meth:`log_empty`, :meth:`log_text`, etc.
-        """
+        """Helper for :meth:`log_empty`, :meth:`log_text`, etc."""
         client = self._require_client(client)
 
         # Apply defaults
diff --git a/google/cloud/logging_v2/gapic/config_service_v2_client.py b/google/cloud/logging_v2/gapic/config_service_v2_client.py
index 37dafa34..d3d08370 100644
--- a/google/cloud/logging_v2/gapic/config_service_v2_client.py
+++ b/google/cloud/logging_v2/gapic/config_service_v2_client.py
@@ -40,7 +40,9 @@ from google.protobuf import field_mask_pb2
 
 
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+    "google-cloud-logging",
+).version
 
 
 class ConfigServiceV2Client(object):
@@ -77,7 +79,8 @@ def from_service_account_file(cls, filename, *args, **kwargs):
     def billing_path(cls, billing_account):
         """Return a fully-qualified billing string."""
         return google.api_core.path_template.expand(
-            "billingAccounts/{billing_account}", billing_account=billing_account,
+            "billingAccounts/{billing_account}",
+            billing_account=billing_account,
         )
 
     @classmethod
@@ -110,7 +113,10 @@ def exclusion_path(cls, project, exclusion):
     @classmethod
     def folder_path(cls, folder):
         """Return a fully-qualified folder string."""
-        return google.api_core.path_template.expand("folders/{folder}", folder=folder,)
+        return google.api_core.path_template.expand(
+            "folders/{folder}",
+            folder=folder,
+        )
 
     @classmethod
     def folder_exclusion_path(cls, folder, exclusion):
@@ -125,14 +131,17 @@ def folder_exclusion_path(cls, folder, exclusion):
     def folder_sink_path(cls, folder, sink):
         """Return a fully-qualified folder_sink string."""
         return google.api_core.path_template.expand(
-            "folders/{folder}/sinks/{sink}", folder=folder, sink=sink,
+            "folders/{folder}/sinks/{sink}",
+            folder=folder,
+            sink=sink,
         )
 
     @classmethod
     def organization_path(cls, organization):
         """Return a fully-qualified organization string."""
         return google.api_core.path_template.expand(
-            "organizations/{organization}", organization=organization,
+            "organizations/{organization}",
+            organization=organization,
         )
 
     @classmethod
@@ -157,14 +166,17 @@ def organization_sink_path(cls, organization, sink):
     def project_path(cls, project):
         """Return a fully-qualified project string."""
         return google.api_core.path_template.expand(
-            "projects/{project}", project=project,
+            "projects/{project}",
+            project=project,
         )
 
     @classmethod
     def sink_path(cls, project, sink):
         """Return a fully-qualified sink string."""
         return google.api_core.path_template.expand(
-            "projects/{project}/sinks/{sink}", project=project, sink=sink,
+            "projects/{project}/sinks/{sink}",
+            project=project,
+            sink=sink,
        )
 
     def __init__(
@@ -253,8 +265,12 @@ def __init__(
             )
             self.transport = transport
         else:
-            self.transport = config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport(
-                address=api_endpoint, channel=channel, credentials=credentials,
+            self.transport = (
+                config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport(
+                    address=api_endpoint,
+                    channel=channel,
+                    credentials=credentials,
+                )
             )
 
         if client_info is None:
@@ -360,7 +376,8 @@ def list_sinks(
         )
 
         request = logging_config_pb2.ListSinksRequest(
-            parent=parent, page_size=page_size,
+            parent=parent,
+            page_size=page_size,
         )
         if metadata is None:
             metadata = []
@@ -450,7 +467,9 @@ def get_sink(
             client_info=self._client_info,
         )
 
-        request = logging_config_pb2.GetSinkRequest(sink_name=sink_name,)
+        request = logging_config_pb2.GetSinkRequest(
+            sink_name=sink_name,
+        )
         if metadata is None:
             metadata = []
         metadata = list(metadata)
@@ -554,7 +573,9 @@ def create_sink(
         )
 
         request = logging_config_pb2.CreateSinkRequest(
-            parent=parent, sink=sink, unique_writer_identity=unique_writer_identity,
+            parent=parent,
+            sink=sink,
+            unique_writer_identity=unique_writer_identity,
         )
         if metadata is None:
             metadata = []
@@ -759,7 +780,9 @@ def delete_sink(
             client_info=self._client_info,
         )
 
-        request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name,)
+        request = logging_config_pb2.DeleteSinkRequest(
+            sink_name=sink_name,
+        )
         if metadata is None:
             metadata = []
         metadata = list(metadata)
@@ -857,7 +880,8 @@ def list_exclusions(
         )
 
         request = logging_config_pb2.ListExclusionsRequest(
-            parent=parent, page_size=page_size,
+            parent=parent,
+            page_size=page_size,
         )
         if metadata is None:
             metadata = []
@@ -947,7 +971,9 @@ def get_exclusion(
             client_info=self._client_info,
         )
 
-        request = logging_config_pb2.GetExclusionRequest(name=name,)
+        request = logging_config_pb2.GetExclusionRequest(
+            name=name,
+        )
         if metadata is None:
             metadata = []
         metadata = list(metadata)
@@ -1038,7 +1064,8 @@ def create_exclusion(
         )
 
         request = logging_config_pb2.CreateExclusionRequest(
-            parent=parent, exclusion=exclusion,
+            parent=parent,
+            exclusion=exclusion,
         )
         if metadata is None:
             metadata = []
@@ -1142,7 +1169,9 @@ def update_exclusion(
         )
 
         request = logging_config_pb2.UpdateExclusionRequest(
-            name=name, exclusion=exclusion, update_mask=update_mask,
+            name=name,
+            exclusion=exclusion,
+            update_mask=update_mask,
         )
         if metadata is None:
             metadata = []
@@ -1218,7 +1247,9 @@ def delete_exclusion(
             client_info=self._client_info,
         )
 
-        request = logging_config_pb2.DeleteExclusionRequest(name=name,)
+        request = logging_config_pb2.DeleteExclusionRequest(
+            name=name,
+        )
         if metadata is None:
             metadata = []
         metadata = list(metadata)
@@ -1306,7 +1337,9 @@ def get_cmek_settings(
             client_info=self._client_info,
         )
 
-        request = logging_config_pb2.GetCmekSettingsRequest(name=name,)
+        request = logging_config_pb2.GetCmekSettingsRequest(
+            name=name,
+        )
         if metadata is None:
             metadata = []
         metadata = list(metadata)
@@ -1422,7 +1455,9 @@ def update_cmek_settings(
         )
 
         request = logging_config_pb2.UpdateCmekSettingsRequest(
-            name=name, cmek_settings=cmek_settings, update_mask=update_mask,
+            name=name,
+            cmek_settings=cmek_settings,
+            update_mask=update_mask,
         )
         if metadata is None:
             metadata = []
diff --git a/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/google/cloud/logging_v2/gapic/logging_service_v2_client.py
index c43506d1..c823deac 100644
--- a/google/cloud/logging_v2/gapic/logging_service_v2_client.py
+++ b/google/cloud/logging_v2/gapic/logging_service_v2_client.py
@@ -44,7 +44,9 @@ from google.protobuf import field_mask_pb2
 
 
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+    "google-cloud-logging",
+).version
 
 
 class LoggingServiceV2Client(object):
@@ -81,7 +83,8 @@ def from_service_account_file(cls, filename, *args, **kwargs):
     def billing_path(cls, billing_account):
         """Return a fully-qualified billing string."""
         return google.api_core.path_template.expand(
-            "billingAccounts/{billing_account}", billing_account=billing_account,
+            "billingAccounts/{billing_account}",
+            billing_account=billing_account,
         )
 
     @classmethod
@@ -96,27 +99,35 @@ def billing_log_path(cls, billing_account, log):
     @classmethod
     def folder_path(cls, folder):
         """Return a fully-qualified folder string."""
-        return google.api_core.path_template.expand("folders/{folder}", folder=folder,)
+        return google.api_core.path_template.expand(
+            "folders/{folder}",
+            folder=folder,
+        )
 
     @classmethod
     def folder_log_path(cls, folder, log):
         """Return a fully-qualified folder_log string."""
         return google.api_core.path_template.expand(
-            "folders/{folder}/logs/{log}", folder=folder, log=log,
+            "folders/{folder}/logs/{log}",
+            folder=folder,
+            log=log,
         )
 
     @classmethod
     def log_path(cls, project, log):
         """Return a fully-qualified log string."""
         return google.api_core.path_template.expand(
-            "projects/{project}/logs/{log}", project=project, log=log,
+            "projects/{project}/logs/{log}",
+            project=project,
+            log=log,
         )
 
     @classmethod
     def organization_path(cls, organization):
         """Return a fully-qualified organization string."""
         return google.api_core.path_template.expand(
-            "organizations/{organization}", organization=organization,
+            "organizations/{organization}",
+            organization=organization,
         )
 
     @classmethod
@@ -132,7 +143,8 @@ def organization_log_path(cls, organization, log):
     def project_path(cls, project):
         """Return a fully-qualified project string."""
         return google.api_core.path_template.expand(
-            "projects/{project}", project=project,
+            "projects/{project}",
+            project=project,
         )
 
     def __init__(
@@ -221,8 +233,12 @@ def __init__(
             )
             self.transport = transport
         else:
-            self.transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport(
-                address=api_endpoint, channel=channel, credentials=credentials,
+            self.transport = (
+                logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport(
+                    address=api_endpoint,
+                    channel=channel,
+                    credentials=credentials,
+                )
             )
 
         if client_info is None:
@@ -311,7 +327,9 @@ def delete_log(
             client_info=self._client_info,
         )
 
-        request = logging_pb2.DeleteLogRequest(log_name=log_name,)
+        request = logging_pb2.DeleteLogRequest(
+            log_name=log_name,
+        )
         if metadata is None:
             metadata = []
         metadata = list(metadata)
@@ -776,7 +794,10 @@ def list_logs(
             client_info=self._client_info,
         )
 
-        request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size,)
+        request = logging_pb2.ListLogsRequest(
+            parent=parent,
+            page_size=page_size,
+        )
         if metadata is None:
             metadata = []
         metadata = list(metadata)
diff --git a/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/google/cloud/logging_v2/gapic/metrics_service_v2_client.py
index 0c80a5d4..87cf5b89 100644
--- a/google/cloud/logging_v2/gapic/metrics_service_v2_client.py
+++ b/google/cloud/logging_v2/gapic/metrics_service_v2_client.py
@@ -46,7 +46,9 @@ from google.protobuf import field_mask_pb2
 
 
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+    "google-cloud-logging",
+).version
 
 
 class MetricsServiceV2Client(object):
@@ -83,33 +85,41 @@ def from_service_account_file(cls, filename, *args, **kwargs):
     def billing_path(cls, billing_account):
         """Return a fully-qualified billing string."""
         return google.api_core.path_template.expand(
-            "billingAccounts/{billing_account}", billing_account=billing_account,
+            "billingAccounts/{billing_account}",
+            billing_account=billing_account,
         )
 
     @classmethod
     def folder_path(cls, folder):
         """Return a fully-qualified folder string."""
-        return google.api_core.path_template.expand("folders/{folder}", folder=folder,)
+        return google.api_core.path_template.expand(
+            "folders/{folder}",
+            folder=folder,
+        )
 
     @classmethod
     def metric_path(cls, project, metric):
         """Return a fully-qualified metric string."""
         return google.api_core.path_template.expand(
-            "projects/{project}/metrics/{metric}", project=project, metric=metric,
+            "projects/{project}/metrics/{metric}",
+            project=project,
+            metric=metric,
         )
 
     @classmethod
     def organization_path(cls, organization):
         """Return a fully-qualified organization string."""
         return google.api_core.path_template.expand(
-            "organizations/{organization}", organization=organization,
+            "organizations/{organization}",
+            organization=organization,
         )
 
     @classmethod
     def project_path(cls, project):
         """Return a fully-qualified project string."""
         return google.api_core.path_template.expand(
-            "projects/{project}", project=project,
+            "projects/{project}",
+            project=project,
         )
 
     def __init__(
@@ -198,8 +208,12 @@ def __init__(
             )
             self.transport = transport
         else:
-            self.transport = metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport(
-                address=api_endpoint, channel=channel, credentials=credentials,
+            self.transport = (
+                metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport(
+                    address=api_endpoint,
+                    channel=channel,
+                    credentials=credentials,
+                )
             )
 
         if client_info is None:
@@ -302,7 +316,8 @@ def list_log_metrics(
         )
 
         request = logging_metrics_pb2.ListLogMetricsRequest(
-            parent=parent, page_size=page_size,
+            parent=parent,
+            page_size=page_size,
         )
         if metadata is None:
             metadata = []
@@ -387,7 +402,9 @@ def get_log_metric(
             client_info=self._client_info,
         )
 
-        request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name,)
+        request = logging_metrics_pb2.GetLogMetricRequest(
+            metric_name=metric_name,
+        )
         if metadata is None:
             metadata = []
         metadata = list(metadata)
@@ -473,7 +490,8 @@ def create_log_metric(
         )
 
         request = logging_metrics_pb2.CreateLogMetricRequest(
-            parent=parent, metric=metric,
+            parent=parent,
+            metric=metric,
         )
         if metadata is None:
             metadata = []
@@ -560,7 +578,8 @@ def update_log_metric(
         )
 
         request = logging_metrics_pb2.UpdateLogMetricRequest(
-            metric_name=metric_name, metric=metric,
+            metric_name=metric_name,
+            metric=metric,
         )
         if metadata is None:
             metadata = []
@@ -631,7 +650,9 @@ def delete_log_metric(
             client_info=self._client_info,
         )
 
-        request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name,)
+        request = logging_metrics_pb2.DeleteLogMetricRequest(
+            metric_name=metric_name,
+        )
         if metadata is None:
             metadata = []
         metadata = list(metadata)
diff --git a/google/cloud/logging_v2/proto/log_entry_pb2.py b/google/cloud/logging_v2/proto/log_entry_pb2.py
index f4805192..9b0ef220 100644
--- a/google/cloud/logging_v2/proto/log_entry_pb2.py
+++ b/google/cloud/logging_v2/proto/log_entry_pb2.py
@@ -427,7 +427,9 @@
         ),
     ],
     extensions=[],
-    nested_types=[_LOGENTRY_LABELSENTRY,],
+    nested_types=[
+        _LOGENTRY_LABELSENTRY,
+    ],
     enum_types=[],
     serialized_options=_b(
         "\352A\271\001\n\032logging.googleapis.com/Log\022\035projects/{project}/logs/{log}\022'organizations/{organization}/logs/{log}\022\033folders/{folder}/logs/{log}\022,billingAccounts/{billing_account}/logs/{log}\032\010log_name"
diff --git a/google/cloud/logging_v2/proto/logging_config_pb2.py b/google/cloud/logging_v2/proto/logging_config_pb2.py
index 65fd2cff..7e4ae83d 100644
--- a/google/cloud/logging_v2/proto/logging_config_pb2.py
+++ b/google/cloud/logging_v2/proto/logging_config_pb2.py
@@ -320,7 +320,9 @@
     ],
     extensions=[],
     nested_types=[],
-    enum_types=[_LOGSINK_VERSIONFORMAT,],
+    enum_types=[
+        _LOGSINK_VERSIONFORMAT,
+    ],
     serialized_options=_b(
         "\352A\270\001\n\033logging.googleapis.com/Sink\022\037projects/{project}/sinks/{sink}\022)organizations/{organization}/sinks/{sink}\022\035folders/{folder}/sinks/{sink}\022.billingAccounts/{billing_account}/sinks/{sink}"
     ),
diff --git a/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py b/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py
index c2e910e1..62e751bf 100644
--- a/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py
+++ b/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py
@@ -8,15 +8,14 @@
 
 
 class ConfigServiceV2Stub(object):
-    """Service for configuring sinks used to route log entries.
-  """
+    """Service for configuring sinks used to route log entries."""
 
     def __init__(self, channel):
         """Constructor.
 
-    Args:
-      channel: A grpc.Channel.
-    """
+        Args:
+            channel: A grpc.Channel.
+        """
         self.ListSinks = channel.unary_unary(
             "/google.logging.v2.ConfigServiceV2/ListSinks",
             request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.ListSinksRequest.SerializeToString,
@@ -80,85 +79,78 @@ def __init__(self, channel):
 
 
 class ConfigServiceV2Servicer(object):
-    """Service for configuring sinks used to route log entries.
-  """
+    """Service for configuring sinks used to route log entries."""
 
     def ListSinks(self, request, context):
-        """Lists sinks.
-    """
+        """Lists sinks."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
 
     def GetSink(self, request, context):
-        """Gets a sink.
-    """
+        """Gets a sink."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
 
     def CreateSink(self, request, context):
         """Creates a sink that exports specified log entries to a destination. The
-    export of newly-ingested log entries begins immediately, unless the sink's
-    `writer_identity` is not permitted to write to the destination. A sink can
-    export log entries only from the resource owning the sink.
-    """
+        export of newly-ingested log entries begins immediately, unless the sink's
+        `writer_identity` is not permitted to write to the destination. A sink can
+        export log entries only from the resource owning the sink.
+        """
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
 
     def UpdateSink(self, request, context):
         """Updates a sink. This method replaces the following fields in the existing
-    sink with values from the new sink: `destination`, and `filter`.
+        sink with values from the new sink: `destination`, and `filter`.
 
-    The updated sink might also have a new `writer_identity`; see the
-    `unique_writer_identity` field.
-    """
+        The updated sink might also have a new `writer_identity`; see the
+        `unique_writer_identity` field.
+        """
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
 
     def DeleteSink(self, request, context):
         """Deletes a sink. If the sink has a unique `writer_identity`, then that
-    service account is also deleted.
-    """
+        service account is also deleted.
+ """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ListExclusions(self, request, context): - """Lists all the exclusions in a parent resource. - """ + """Lists all the exclusions in a parent resource.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def GetExclusion(self, request, context): - """Gets the description of an exclusion. - """ + """Gets the description of an exclusion.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def CreateExclusion(self, request, context): """Creates a new exclusion in a specified parent resource. - Only log entries belonging to that resource can be excluded. - You can have up to 10 exclusions in a resource. - """ + Only log entries belonging to that resource can be excluded. + You can have up to 10 exclusions in a resource. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def UpdateExclusion(self, request, context): - """Changes one or more properties of an existing exclusion. - """ + """Changes one or more properties of an existing exclusion.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def DeleteExclusion(self, request, context): - """Deletes an exclusion. - """ + """Deletes an exclusion.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") @@ -166,13 +158,13 @@ def DeleteExclusion(self, request, context): def GetCmekSettings(self, request, context): """Gets the Logs Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders in - the GCP organization. + Note: CMEK for the Logs Router can currently only be configured for GCP + organizations. Once configured, it applies to all projects and folders in + the GCP organization. - See [Enabling CMEK for Logs - Router](/logging/docs/routing/managed-encryption) for more information. - """ + See [Enabling CMEK for Logs + Router](/logging/docs/routing/managed-encryption) for more information. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") @@ -180,19 +172,19 @@ def GetCmekSettings(self, request, context): def UpdateCmekSettings(self, request, context): """Updates the Logs Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured for GCP - organizations. Once configured, it applies to all projects and folders in - the GCP organization. + Note: CMEK for the Logs Router can currently only be configured for GCP + organizations. Once configured, it applies to all projects and folders in + the GCP organization. - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] - will fail if 1) `kms_key_name` is invalid, or 2) the associated service - account does not have the required - `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key, or - 3) access to the key is disabled. 
diff --git a/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/google/cloud/logging_v2/proto/logging_metrics_pb2.py
index 01e308fb..08eaf809 100644
--- a/google/cloud/logging_v2/proto/logging_metrics_pb2.py
+++ b/google/cloud/logging_v2/proto/logging_metrics_pb2.py
@@ -318,8 +318,12 @@
         ),
     ],
     extensions=[],
-    nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY,],
-    enum_types=[_LOGMETRIC_APIVERSION,],
+    nested_types=[
+        _LOGMETRIC_LABELEXTRACTORSENTRY,
+    ],
+    enum_types=[
+        _LOGMETRIC_APIVERSION,
+    ],
     serialized_options=_b(
         "\352AD\n\035logging.googleapis.com/Metric\022#projects/{project}/metrics/{metric}"
     ),
diff --git a/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py b/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py
index 09f84e03..a3a37336 100644
--- a/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py
+++ b/google/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py
@@ -8,15 +8,14 @@
 
 
 class MetricsServiceV2Stub(object):
-    """Service for configuring logs-based metrics.
-    """
+    """Service for configuring logs-based metrics."""
 
     def __init__(self, channel):
         """Constructor.
 
-    Args:
-      channel: A grpc.Channel.
-    """
+        Args:
+            channel: A grpc.Channel.
+        """
         self.ListLogMetrics = channel.unary_unary(
             "/google.logging.v2.MetricsServiceV2/ListLogMetrics",
             request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString,
@@ -45,40 +44,34 @@ def __init__(self, channel):
 
 
 class MetricsServiceV2Servicer(object):
-    """Service for configuring logs-based metrics.
-    """
+    """Service for configuring logs-based metrics."""
 
     def ListLogMetrics(self, request, context):
-        """Lists logs-based metrics.
-    """
+        """Lists logs-based metrics."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
 
     def GetLogMetric(self, request, context):
-        """Gets a logs-based metric.
-    """
+        """Gets a logs-based metric."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
 
     def CreateLogMetric(self, request, context):
-        """Creates a logs-based metric.
-    """
+        """Creates a logs-based metric."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
 
     def UpdateLogMetric(self, request, context):
-        """Creates or updates a logs-based metric.
-    """
+        """Creates or updates a logs-based metric."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
 
     def DeleteLogMetric(self, request, context):
-        """Deletes a logs-based metric.
-    """
+        """Deletes a logs-based metric."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
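
For comparison with the servicer side above, the client-side path goes through the GAPIC MetricsServiceV2Client edited earlier in this diff. A hedged sketch with placeholder project and filter values:

    from google.cloud import logging_v2

    client = logging_v2.MetricsServiceV2Client()
    parent = client.project_path("my-project")  # placeholder project
    metric = {"name": "error-count", "filter": "severity>=ERROR"}  # placeholders
    # Internally builds the CreateLogMetricRequest shown in the client hunk above.
    response = client.create_log_metric(parent, metric)
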
- """ + """Deletes a logs-based metric.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") diff --git a/google/cloud/logging_v2/proto/logging_pb2.py b/google/cloud/logging_v2/proto/logging_pb2.py index 35c9b9c5..08cc2b49 100644 --- a/google/cloud/logging_v2/proto/logging_pb2.py +++ b/google/cloud/logging_v2/proto/logging_pb2.py @@ -274,7 +274,9 @@ ), ], extensions=[], - nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY,], + nested_types=[ + _WRITELOGENTRIESREQUEST_LABELSENTRY, + ], enum_types=[], serialized_options=None, is_extendable=False, @@ -389,7 +391,9 @@ ), ], extensions=[], - nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY,], + nested_types=[ + _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, + ], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/google/cloud/logging_v2/proto/logging_pb2_grpc.py index e1759bbc..2e444b92 100644 --- a/google/cloud/logging_v2/proto/logging_pb2_grpc.py +++ b/google/cloud/logging_v2/proto/logging_pb2_grpc.py @@ -8,15 +8,14 @@ class LoggingServiceV2Stub(object): - """Service for ingesting and querying logs. - """ + """Service for ingesting and querying logs.""" def __init__(self, channel): """Constructor. - Args: - channel: A grpc.Channel. - """ + Args: + channel: A grpc.Channel. + """ self.DeleteLog = channel.unary_unary( "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString, @@ -45,52 +44,50 @@ def __init__(self, channel): class LoggingServiceV2Servicer(object): - """Service for ingesting and querying logs. - """ + """Service for ingesting and querying logs.""" def DeleteLog(self, request, context): """Deletes all the log entries in a log. The log reappears if it receives new - entries. Log entries written shortly before the delete operation might not - be deleted. Entries received after the delete operation with a timestamp - before the operation will be deleted. - """ + entries. Log entries written shortly before the delete operation might not + be deleted. Entries received after the delete operation with a timestamp + before the operation will be deleted. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def WriteLogEntries(self, request, context): """Writes log entries to Logging. This API method is the - only way to send log entries to Logging. This method - is used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use Logging. - A single request may contain log entries for a maximum of 1000 - different resources (projects, organizations, billing accounts or - folders) - """ + only way to send log entries to Logging. This method + is used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use Logging. + A single request may contain log entries for a maximum of 1000 + different resources (projects, organizations, billing accounts or + folders) + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ListLogEntries(self, request, context): """Lists log entries. 
diff --git a/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/google/cloud/logging_v2/proto/logging_pb2_grpc.py
index e1759bbc..2e444b92 100644
--- a/google/cloud/logging_v2/proto/logging_pb2_grpc.py
+++ b/google/cloud/logging_v2/proto/logging_pb2_grpc.py
@@ -8,15 +8,14 @@
 
 
 class LoggingServiceV2Stub(object):
-    """Service for ingesting and querying logs.
-    """
+    """Service for ingesting and querying logs."""
 
     def __init__(self, channel):
         """Constructor.
 
-    Args:
-      channel: A grpc.Channel.
-    """
+        Args:
+            channel: A grpc.Channel.
+        """
         self.DeleteLog = channel.unary_unary(
             "/google.logging.v2.LoggingServiceV2/DeleteLog",
             request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString,
@@ -45,52 +44,50 @@ def __init__(self, channel):
 
 
 class LoggingServiceV2Servicer(object):
-    """Service for ingesting and querying logs.
-    """
+    """Service for ingesting and querying logs."""
 
     def DeleteLog(self, request, context):
         """Deletes all the log entries in a log. The log reappears if it receives new
-    entries. Log entries written shortly before the delete operation might not
-    be deleted. Entries received after the delete operation with a timestamp
-    before the operation will be deleted.
-    """
+        entries. Log entries written shortly before the delete operation might not
+        be deleted. Entries received after the delete operation with a timestamp
+        before the operation will be deleted.
+        """
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
 
     def WriteLogEntries(self, request, context):
         """Writes log entries to Logging. This API method is the
-    only way to send log entries to Logging. This method
-    is used, directly or indirectly, by the Logging agent
-    (fluentd) and all logging libraries configured to use Logging.
-    A single request may contain log entries for a maximum of 1000
-    different resources (projects, organizations, billing accounts or
-    folders)
-    """
+        only way to send log entries to Logging. This method
+        is used, directly or indirectly, by the Logging agent
+        (fluentd) and all logging libraries configured to use Logging.
+        A single request may contain log entries for a maximum of 1000
+        different resources (projects, organizations, billing accounts or
+        folders)
+        """
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
 
     def ListLogEntries(self, request, context):
         """Lists log entries. Use this method to retrieve log entries that originated
-    from a project/folder/organization/billing account. For ways to export log
-    entries, see [Exporting Logs](/logging/docs/export).
-    """
+        from a project/folder/organization/billing account. For ways to export log
+        entries, see [Exporting Logs](/logging/docs/export).
+        """
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
 
     def ListMonitoredResourceDescriptors(self, request, context):
-        """Lists the descriptors for monitored resource types used by Logging.
-    """
+        """Lists the descriptors for monitored resource types used by Logging."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
 
     def ListLogs(self, request, context):
         """Lists the logs in projects, organizations, folders, or billing accounts.
-    Only logs that have entries are listed.
-    """
+        Only logs that have entries are listed.
+        """
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details("Method not implemented!")
         raise NotImplementedError("Method not implemented!")
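
The WriteLogEntries docstring above notes the documented cap of 1000 distinct resources per request. A hedged sketch of the corresponding client call through the GAPIC LoggingServiceV2Client, with placeholder names:

    from google.cloud import logging_v2

    client = logging_v2.LoggingServiceV2Client()
    entries = [
        {
            "log_name": "projects/my-project/logs/my-log",  # placeholders
            "resource": {"type": "global"},
            "text_payload": "hello world",
        }
    ]
    client.write_log_entries(entries)
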
diff --git a/noxfile.py b/noxfile.py
index 826477c0..5e150814 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -114,7 +114,7 @@ def unit(session):
     default(session)
 
 
-@nox.session(python=['2.7', '3.6'])
+@nox.session(python=['3.6'])
 def system(session):
     """Run the system test suite."""
 
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
new file mode 100755
index 00000000..ff599eb2
--- /dev/null
+++ b/scripts/decrypt-secrets.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+    > testing/test-env.sh
+gcloud secrets versions access latest \
+    --secret="python-docs-samples-service-account" \
+    > testing/service-account.json
+gcloud secrets versions access latest \
+    --secret="python-docs-samples-client-secrets" \
+    > testing/client-secrets.json
\ No newline at end of file
diff --git a/testing/.gitignore b/testing/.gitignore
new file mode 100644
index 00000000..b05fbd63
--- /dev/null
+++ b/testing/.gitignore
@@ -0,0 +1,3 @@
+test-env.sh
+service-account.json
+client-secrets.json
\ No newline at end of file
diff --git a/tests/system/test_system.py b/tests/system/test_system.py
index ea51aa8f..db6dbe95 100644
--- a/tests/system/test_system.py
+++ b/tests/system/test_system.py
@@ -425,11 +425,11 @@ def test_create_sink_pubsub_topic(self):
         publisher = pubsub_v1.PublisherClient()
         topic_path = publisher.topic_path(Config.CLIENT.project, TOPIC_NAME)
         self.to_delete.append(_DeleteWrapper(publisher, topic_path))
-        publisher.create_topic(topic_path)
+        publisher.create_topic(request={"name": topic_path})
 
-        policy = publisher.get_iam_policy(topic_path)
+        policy = publisher.get_iam_policy(request={"resource": topic_path})
         policy.bindings.add(role="roles/owner", members=["group:cloud-logs@google.com"])
-        publisher.set_iam_policy(topic_path, policy)
+        publisher.set_iam_policy(request={"resource": topic_path, "policy": policy})
 
         TOPIC_URI = "pubsub.googleapis.com/%s" % (topic_path,)
 
@@ -536,4 +536,4 @@ def __init__(self, publisher, topic_path):
         self.topic_path = topic_path
 
     def delete(self):
-        self.publisher.delete_topic(self.topic_path)
+        self.publisher.delete_topic(request={"topic": self.topic_path})
diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py
index 83de87aa..7ad247ca 100644
--- a/tests/unit/test__http.py
+++ b/tests/unit/test__http.py
@@ -49,10 +49,19 @@ def test_default_url(self):
         self.assertIs(conn._client, client)
 
     def test_build_api_url_w_custom_endpoint(self):
+        from six.moves.urllib.parse import parse_qsl
+        from six.moves.urllib.parse import urlsplit
+
         custom_endpoint = "https://foo-logging.googleapis.com"
         conn = self._make_one(object(), api_endpoint=custom_endpoint)
-        URI = "/".join([custom_endpoint, conn.API_VERSION, "foo"])
-        self.assertEqual(conn.build_api_url("/foo"), URI)
+        uri = conn.build_api_url("/foo")
+        scheme, netloc, path, qs, _ = urlsplit(uri)
+        self.assertEqual("%s://%s" % (scheme, netloc), custom_endpoint)
+        self.assertEqual(path, "/".join(["", conn.API_VERSION, "foo"]))
+        parms = dict(parse_qsl(qs))
+        pretty_print = parms.pop("prettyPrint", "false")
+        self.assertEqual(pretty_print, "false")
+        self.assertEqual(parms, {})
 
     def test_extra_headers(self):
         import requests
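
The rewritten test_build_api_url_w_custom_endpoint stops comparing whole URLs because build_api_url may now append a prettyPrint query parameter; parsing the query string keeps the assertion order-insensitive. A condensed sketch of the same idea (the URL is a made-up example):

    from six.moves.urllib.parse import parse_qsl, urlsplit

    uri = "https://foo-logging.googleapis.com/v2/foo?prettyPrint=false"
    scheme, netloc, path, qs, _ = urlsplit(uri)
    params = dict(parse_qsl(qs))
    assert params.pop("prettyPrint", "false") == "false"
    assert params == {}
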