diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml new file mode 100644 index 0000000..7c2be3b --- /dev/null +++ b/.github/.OwlBot.lock.yaml @@ -0,0 +1,3 @@ +docker: + digest: sha256:52b5557b7155a80e6f1684c2376d5eef0df6d8d5c785551e1ff8cc000603b62a + image: gcr.io/cloud-devrel-public-resources/owlbot-java:latest \ No newline at end of file diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml new file mode 100644 index 0000000..22cc68a --- /dev/null +++ b/.github/.OwlBot.yaml @@ -0,0 +1,32 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +docker: + image: "gcr.io/repo-automation-bots/owlbot-java:latest" + +deep-remove-regex: +- "/grpc-google-.*/src" +- "/proto-google-.*/src" +- "/google-.*/src" + +deep-preserve-regex: +- "/google-.*/src/test/java/com/google/cloud/.*/v.*/it/IT.*Test.java" + +deep-copy-regex: +- source: "/google/cloud/bigquery/migration/(v.*)/.*-java/proto-google-.*/src" + dest: "/owl-bot-staging/$1/proto-google-cloud-bigquerymigration-$1/src" +- source: "/google/cloud/bigquery/migration/(v.*)/.*-java/grpc-google-.*/src" + dest: "/owl-bot-staging/$1/grpc-google-cloud-bigquerymigration-$1/src" +- source: "/google/cloud/bigquery/migration/(v.*)/.*-java/gapic-google-.*/src" + dest: "/owl-bot-staging/$1/google-cloud-bigquerymigration/src" diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..30fdb7b --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,10 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. + +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + +* @googleapis/yoshi-java + +# The java-samples-reviewers team is the default owner for samples changes +samples/**/*.java @googleapis/java-samples-reviewers diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..0ee7884 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,51 @@ +--- +name: Bug report +about: Create a report to help us improve + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 
+ +Please run down the following list and make sure you've tried the usual "quick fixes": + + - Search the issues already opened: https://github.com/googleapis/java-bigquerymigration/issues + - Check for answers on StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform + +If you are still having issues, please include as much information as possible: + +#### Environment details + +1. Specify the API at the beginning of the title. For example, "BigQuery: ..."). + General, Core, and Other are also allowed as types +2. OS type and version: +3. Java version: +4. bigquerymigration version(s): + +#### Steps to reproduce + + 1. ? + 2. ? + +#### Code example + +```java +// example +``` + +#### Stack trace +``` +Any relevant stacktrace here. +``` + +#### External references such as API reference guides + +- ? + +#### Any additional information below + + +Following these steps guarantees the quickest resolution possible. + +Thanks! diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..754e30c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,21 @@ +--- +name: Feature request +about: Suggest an idea for this library + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + +**Is your feature request related to a problem? Please describe.** +What the problem is. Example: I'm always frustrated when [...] + +**Describe the solution you'd like** +What you want to happen. + +**Describe alternatives you've considered** +Any alternative solutions or features you've considered. + +**Additional context** +Any other context or screenshots about the feature request. 
diff --git a/.github/ISSUE_TEMPLATE/support_request.md b/.github/ISSUE_TEMPLATE/support_request.md new file mode 100644 index 0000000..9958690 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/support_request.md @@ -0,0 +1,7 @@ +--- +name: Support request +about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. + +--- + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..88f7e50 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,7 @@ +Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/java-bigquerymigration/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass +- [ ] Code coverage does not decrease (if any source code was changed) +- [ ] Appropriate docs were updated (if necessary) + +Fixes # ☕️ diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml new file mode 100644 index 0000000..1a23ea4 --- /dev/null +++ b/.github/blunderbuss.yml @@ -0,0 +1,7 @@ +# Configuration for the Blunderbuss GitHub app. 
For more info see +# https://github.com/googleapis/repo-automation-bots/tree/master/packages/blunderbuss +assign_prs_by: +- labels: + - samples + to: + - googleapis/java-samples-reviewers \ No newline at end of file diff --git a/.github/generated-files-bot.yml b/.github/generated-files-bot.yml new file mode 100644 index 0000000..c644a24 --- /dev/null +++ b/.github/generated-files-bot.yml @@ -0,0 +1,12 @@ +externalManifests: +- type: json + file: 'synth.metadata' + jsonpath: '$.generatedFiles[*]' +- type: json + file: '.github/readme/synth.metadata/synth.metadata' + jsonpath: '$.generatedFiles[*]' +ignoreAuthors: +- 'renovate-bot' +- 'yoshi-automation' +- 'release-please[bot]' +- 'gcf-owl-bot[bot]' diff --git a/.github/release-please.yml b/.github/release-please.yml new file mode 100644 index 0000000..8ca7f9c --- /dev/null +++ b/.github/release-please.yml @@ -0,0 +1,3 @@ +bumpMinorPreMajor: true +handleGHRelease: true +releaseType: java-yoshi diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml new file mode 100644 index 0000000..d4ca941 --- /dev/null +++ b/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml new file mode 100644 index 0000000..e69de29 diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml new file mode 100644 index 0000000..1e6162b --- /dev/null +++ b/.github/sync-repo-settings.yaml @@ -0,0 +1,47 @@ + +# Whether or not rebase-merging is enabled on this repository. +# Defaults to `true` +rebaseMergeAllowed: false + +# Whether or not squash-merging is enabled on this repository. +# Defaults to `true` +squashMergeAllowed: true + +# Whether or not PRs are merged with a merge commit on this repository. +# Defaults to `false` +mergeCommitAllowed: false + +# Rules for master branch protection +branchProtectionRules: +# Identifies the protection rule pattern. Name of the branch to be protected. 
+# Defaults to `master` +- pattern: master + # Can admins overwrite branch protection. + # Defaults to `true` + isAdminEnforced: true + # Number of approving reviews required to update matching branches. + # Defaults to `1` + requiredApprovingReviewCount: 1 + # Are reviews from code owners required to update matching branches. + # Defaults to `false` + requiresCodeOwnerReviews: true + # Require up to date branches + requiresStrictStatusChecks: false + # List of required status check contexts that must pass for commits to be accepted to matching branches. + requiredStatusCheckContexts: + - "dependencies (8)" + - "dependencies (11)" + - "lint" + - "clirr" + - "units (8)" + - "units (11)" + - "Kokoro - Test: Integration" + - "cla/google" +# List of explicit permissions to add (additive only) +permissionRules: +- team: yoshi-admins + permission: admin +- team: yoshi-java-admins + permission: admin +- team: yoshi-java + permission: push diff --git a/.github/trusted-contribution.yml b/.github/trusted-contribution.yml new file mode 100644 index 0000000..a0ba1f7 --- /dev/null +++ b/.github/trusted-contribution.yml @@ -0,0 +1,3 @@ +trustedContributors: +- renovate-bot +- gcf-owl-bot[bot] diff --git a/.github/workflows/approve-readme.yaml b/.github/workflows/approve-readme.yaml new file mode 100644 index 0000000..7513aca --- /dev/null +++ b/.github/workflows/approve-readme.yaml @@ -0,0 +1,54 @@ +on: + pull_request: +name: auto-merge-readme +jobs: + approve: + runs-on: ubuntu-latest + if: github.repository_owner == 'googleapis' && github.head_ref == 'autosynth-readme' + steps: + - uses: actions/github-script@v3 + with: + github-token: ${{secrets.YOSHI_APPROVER_TOKEN}} + script: | + // only approve PRs from yoshi-automation + if (context.payload.pull_request.user.login !== "yoshi-automation") { + return; + } + + // only approve PRs titled "chore: regenerate README" + if (context.payload.pull_request.title !== "chore: regenerate README") { + return; + } + + // only approve PRs with
README.md and synth.metadata changes + const files = new Set( + ( + await github.paginate( + github.pulls.listFiles.endpoint({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.pull_request.number, + }) + ) + ).map(file => file.filename) + ); + if (files.size != 2 || !files.has("README.md") || !files.has(".github/readme/synth.metadata/synth.metadata")) { + return; + } + + // approve README regeneration PR + await github.pulls.createReview({ + owner: context.repo.owner, + repo: context.repo.repo, + body: 'Rubber stamped PR!', + pull_number: context.payload.pull_request.number, + event: 'APPROVE' + }); + + // attach automerge label + await github.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.payload.pull_request.number, + labels: ['automerge'] + }); diff --git a/.github/workflows/auto-release.yaml b/.github/workflows/auto-release.yaml new file mode 100644 index 0000000..9b4fd4d --- /dev/null +++ b/.github/workflows/auto-release.yaml @@ -0,0 +1,88 @@ +on: + pull_request: +name: auto-release +jobs: + approve: + runs-on: ubuntu-latest + if: contains(github.head_ref, 'release-please') + steps: + - uses: actions/github-script@v3 + with: + github-token: ${{secrets.YOSHI_APPROVER_TOKEN}} + debug: true + script: | + // only approve PRs from release-please[bot] + if (context.payload.pull_request.user.login !== "release-please[bot]") { + return; + } + + // only approve PRs like "chore: release " + if ( !context.payload.pull_request.title.startsWith("chore: release") ) { + return; + } + + // only approve PRs with pom.xml and versions.txt changes + const filesPromise = github.pulls.listFiles.endpoint({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.pull_request.number, + }); + const changed_files = await github.paginate(filesPromise) + + if ( changed_files.length < 1 ) { + console.log( "Not proceeding since PR is empty!" 
) + return; + } + + if ( !changed_files.some(v => v.filename.includes("pom")) || !changed_files.some(v => v.filename.includes("versions.txt")) ) { + console.log( "PR file changes do not have pom.xml or versions.txt -- something is wrong. PTAL!" ) + return; + } + + // trigger auto-release when + // 1) it is a SNAPSHOT release (auto-generated post regular release) + // 2) there are dependency updates only + // 3) there are no open dependency update PRs in this repo (to avoid multiple releases) + if ( + context.payload.pull_request.body.includes("Fix") || + context.payload.pull_request.body.includes("Build") || + context.payload.pull_request.body.includes("Documentation") || + context.payload.pull_request.body.includes("BREAKING CHANGES") || + context.payload.pull_request.body.includes("Features") + ) { + console.log( "Not auto-releasing since it is not a dependency-update-only release." ); + return; + } + + const promise = github.pulls.list.endpoint({ + owner: context.repo.owner, + repo: context.repo.repo, + state: 'open' + }); + const open_pulls = await github.paginate(promise) + + if ( open_pulls.length > 1 && !context.payload.pull_request.title.includes("SNAPSHOT") ) { + for ( const pull of open_pulls ) { + if ( pull.title.startsWith("deps: update dependency") ) { + console.log( "Not auto-releasing yet since there are dependency update PRs open in this repo." 
); + return; + } + } + } + + // approve release PR + await github.pulls.createReview({ + owner: context.repo.owner, + repo: context.repo.repo, + body: 'Rubber stamped release!', + pull_number: context.payload.pull_request.number, + event: 'APPROVE' + }); + + // attach kokoro:force-run and automerge labels + await github.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.payload.pull_request.number, + labels: ['kokoro:force-run', 'automerge'] + }); diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..3becb5c --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,81 @@ +on: + push: + branches: + - master + pull_request: +name: ci +jobs: + units: + runs-on: ubuntu-latest + strategy: + matrix: + java: [8, 11] + steps: + - uses: actions/checkout@v2 + - uses: stCarolas/setup-maven@v4 + with: + maven-version: 3.8.1 + - uses: actions/setup-java@v1 + with: + java-version: ${{matrix.java}} + - run: java -version + - run: .kokoro/build.sh + env: + JOB_TYPE: test + windows: + runs-on: windows-latest + steps: + - uses: actions/checkout@v2 + - uses: stCarolas/setup-maven@v4 + with: + maven-version: 3.8.1 + - uses: actions/setup-java@v1 + with: + java-version: 8 + - run: java -version + - run: .kokoro/build.bat + env: + JOB_TYPE: test + dependencies: + runs-on: ubuntu-latest + strategy: + matrix: + java: [8, 11] + steps: + - uses: actions/checkout@v2 + - uses: stCarolas/setup-maven@v4 + with: + maven-version: 3.8.1 + - uses: actions/setup-java@v1 + with: + java-version: ${{matrix.java}} + - run: java -version + - run: .kokoro/dependencies.sh + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: stCarolas/setup-maven@v4 + with: + maven-version: 3.8.1 + - uses: actions/setup-java@v1 + with: + java-version: 8 + - run: java -version + - run: .kokoro/build.sh + env: + JOB_TYPE: lint + clirr: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - 
uses: stCarolas/setup-maven@v4 + with: + maven-version: 3.8.1 + - uses: actions/setup-java@v1 + with: + java-version: 8 + - run: java -version + - run: .kokoro/build.sh + env: + JOB_TYPE: clirr diff --git a/.github/workflows/samples.yaml b/.github/workflows/samples.yaml new file mode 100644 index 0000000..c46230a --- /dev/null +++ b/.github/workflows/samples.yaml @@ -0,0 +1,14 @@ +on: + pull_request: +name: samples +jobs: + checkstyle: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-java@v1 + with: + java-version: 8 + - name: Run checkstyle + run: mvn -P lint --quiet --batch-mode checkstyle:check + working-directory: samples/snippets diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..069d08f --- /dev/null +++ b/.gitignore @@ -0,0 +1,17 @@ +# Maven +target/ + +# Eclipse +.classpath +.project +.settings + +# Intellij +*.iml +.idea/ + +# python utilities +*.pyc +__pycache__ + +.flattened-pom.xml diff --git a/.kokoro/build.bat b/.kokoro/build.bat new file mode 100644 index 0000000..05826ad --- /dev/null +++ b/.kokoro/build.bat @@ -0,0 +1,3 @@ +:: See documentation in type-shell-output.bat + +"C:\Program Files\Git\bin\bash.exe" %~dp0build.sh diff --git a/.kokoro/build.sh b/.kokoro/build.sh new file mode 100755 index 0000000..2ad3e07 --- /dev/null +++ b/.kokoro/build.sh @@ -0,0 +1,125 @@ +#!/bin/bash +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -eo pipefail + +## Get the directory of the build script +scriptDir=$(realpath $(dirname "${BASH_SOURCE[0]}")) +## cd to the parent directory, i.e. the root of the git repo +cd ${scriptDir}/.. + +# include common functions +source ${scriptDir}/common.sh + +# Print out Maven & Java version +mvn -version +echo ${JOB_TYPE} + +# attempt to install 3 times with exponential backoff (starting with 10 seconds) +retry_with_backoff 3 10 \ + mvn install -B -V -ntp \ + -DskipTests=true \ + -Dclirr.skip=true \ + -Denforcer.skip=true \ + -Dmaven.javadoc.skip=true \ + -Dgcloud.download.skip=true \ + -T 1C + +# if GOOGLE_APPLICATION_CREDENTIALS is specified as a relative path, prepend Kokoro root directory onto it +if [[ ! -z "${GOOGLE_APPLICATION_CREDENTIALS}" && "${GOOGLE_APPLICATION_CREDENTIALS}" != /* ]]; then + export GOOGLE_APPLICATION_CREDENTIALS=$(realpath ${KOKORO_GFILE_DIR}/${GOOGLE_APPLICATION_CREDENTIALS}) +fi + +RETURN_CODE=0 +set +e + +case ${JOB_TYPE} in +test) + mvn test -B -Dclirr.skip=true -Denforcer.skip=true + RETURN_CODE=$? + ;; +lint) + mvn com.coveo:fmt-maven-plugin:check + RETURN_CODE=$? + ;; +javadoc) + mvn javadoc:javadoc javadoc:test-javadoc + RETURN_CODE=$? + ;; +integration) + mvn -B ${INTEGRATION_TEST_ARGS} \ + -ntp \ + -Penable-integration-tests \ + -DtrimStackTrace=false \ + -Dclirr.skip=true \ + -Denforcer.skip=true \ + -fae \ + verify + RETURN_CODE=$? + ;; +samples) + SAMPLES_DIR=samples + # only run ITs in snapshot/ on presubmit PRs. run ITs in all 3 samples/ subdirectories otherwise. + if [[ ! -z ${KOKORO_GITHUB_PULL_REQUEST_NUMBER} ]] + then + SAMPLES_DIR=samples/snapshot + fi + + if [[ -f ${SAMPLES_DIR}/pom.xml ]] + then + for FILE in ${KOKORO_GFILE_DIR}/secret_manager/*-samples-secrets; do + [[ -f "$FILE" ]] || continue + source "$FILE" + done + + pushd ${SAMPLES_DIR} + mvn -B \ + -Penable-samples \ + -ntp \ + -DtrimStackTrace=false \ + -Dclirr.skip=true \ + -Denforcer.skip=true \ + -fae \ + verify + RETURN_CODE=$? 
+ popd + else + echo "no sample pom.xml found - skipping sample tests" + fi + ;; +clirr) + mvn -B -Denforcer.skip=true clirr:check + RETURN_CODE=$? + ;; +*) + ;; +esac + +if [ "${REPORT_COVERAGE}" == "true" ] +then + bash ${KOKORO_GFILE_DIR}/codecov.sh +fi + +# fix output location of logs +bash .kokoro/coerce_logs.sh + +if [[ "${ENABLE_FLAKYBOT}" == "true" ]] +then + chmod +x ${KOKORO_GFILE_DIR}/linux_amd64/flakybot + ${KOKORO_GFILE_DIR}/linux_amd64/flakybot -repo=googleapis/java-bigquerymigration +fi + +echo "exiting with ${RETURN_CODE}" +exit ${RETURN_CODE} diff --git a/.kokoro/coerce_logs.sh b/.kokoro/coerce_logs.sh new file mode 100755 index 0000000..46edbf7 --- /dev/null +++ b/.kokoro/coerce_logs.sh @@ -0,0 +1,37 @@ +#!/bin/bash +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This script finds and moves sponge logs so that they can be found by placer +# and are not flagged as flaky by sponge. + +set -eo pipefail + +## Get the directory of the build script +scriptDir=$(realpath $(dirname "${BASH_SOURCE[0]}")) +## cd to the parent directory, i.e. the root of the git repo +cd ${scriptDir}/.. + +job=$(basename ${KOKORO_JOB_NAME}) + +echo "coercing sponge logs..." +for xml in `find . 
-name '*-sponge_log.xml'` +do + class=$(basename ${xml} | cut -d- -f2) + dir=$(dirname ${xml})/${job}/${class} + text=$(dirname ${xml})/${class}-sponge_log.txt + mkdir -p ${dir} + mv ${xml} ${dir}/sponge_log.xml + mv ${text} ${dir}/sponge_log.txt +done diff --git a/.kokoro/common.cfg b/.kokoro/common.cfg new file mode 100644 index 0000000..78a953e --- /dev/null +++ b/.kokoro/common.cfg @@ -0,0 +1,13 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Download trampoline resources. These will be in ${KOKORO_GFILE_DIR} +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# All builds use the trampoline script to run in docker. +build_file: "java-bigquerymigration/.kokoro/trampoline.sh" + +# Tell the trampoline which build file to use. +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/build.sh" +} diff --git a/.kokoro/common.sh b/.kokoro/common.sh new file mode 100644 index 0000000..ace89f4 --- /dev/null +++ b/.kokoro/common.sh @@ -0,0 +1,58 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +function retry_with_backoff { + attempts_left=$1 + sleep_seconds=$2 + shift 2 + command=$@ + + + # store current flag state + flags=$- + + # allow failures to continue + set +e + ${command} + exit_code=$?
+ + # restore "e" flag + if [[ ${flags} =~ e ]] + then set -e + else set +e + fi + + if [[ $exit_code == 0 ]] + then + return 0 + fi + + # failure + if [[ ${attempts_left} -gt 0 ]] + then + echo "failure (${exit_code}), sleeping ${sleep_seconds}..." + sleep ${sleep_seconds} + new_attempts=$((${attempts_left} - 1)) + new_sleep=$((${sleep_seconds} * 2)) + retry_with_backoff ${new_attempts} ${new_sleep} ${command} + fi + + return $exit_code +} + +## Helper functions +function now() { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n'; } +function msg() { println "$*" >&2; } +function println() { printf '%s\n' "$(now) $*"; } \ No newline at end of file diff --git a/.kokoro/continuous/common.cfg b/.kokoro/continuous/common.cfg new file mode 100644 index 0000000..eb24732 --- /dev/null +++ b/.kokoro/continuous/common.cfg @@ -0,0 +1,25 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + regex: "**/*sponge_log.txt" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "java-bigquerymigration/.kokoro/trampoline.sh" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/build.sh" +} + +env_vars: { + key: "JOB_TYPE" + value: "test" +} diff --git a/.kokoro/continuous/java8.cfg b/.kokoro/continuous/java8.cfg new file mode 100644 index 0000000..495cc7b --- /dev/null +++ b/.kokoro/continuous/java8.cfg @@ -0,0 +1,12 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline.
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java8" +} + +env_vars: { + key: "REPORT_COVERAGE" + value: "true" +} diff --git a/.kokoro/dependencies.sh b/.kokoro/dependencies.sh new file mode 100755 index 0000000..9030ba8 --- /dev/null +++ b/.kokoro/dependencies.sh @@ -0,0 +1,91 @@ +#!/bin/bash +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail +shopt -s nullglob + +## Get the directory of the build script +scriptDir=$(realpath $(dirname "${BASH_SOURCE[0]}")) +## cd to the parent directory, i.e. the root of the git repo +cd ${scriptDir}/.. + +# include common functions +source ${scriptDir}/common.sh + +# Print out Java +java -version +echo $JOB_TYPE + +export MAVEN_OPTS="-Xmx1024m -XX:MaxPermSize=128m" + +# this should run maven enforcer +retry_with_backoff 3 10 \ + mvn install -B -V -ntp \ + -DskipTests=true \ + -Dmaven.javadoc.skip=true \ + -Dclirr.skip=true + +mvn -B dependency:analyze -DfailOnWarning=true + +echo "****************** DEPENDENCY LIST COMPLETENESS CHECK *******************" +## Run dependency list completeness check +function completenessCheck() { + # Output dep list with compile scope generated using the original pom + # Running mvn dependency:list on Java versions that support modules will also include the module of the dependency. + # This is stripped from the output as it is not present in the flattened pom. 
+ # Only dependencies with 'compile' or 'runtime' scope are included from original dependency list. + msg "Generating dependency list using original pom..." + mvn dependency:list -f pom.xml -DincludeScope=runtime -Dsort=true | grep '\[INFO] .*:.*:.*:.*:.*' | sed -e 's/ --.*//' >.org-list.txt + + # Output dep list generated using the flattened pom (only 'compile' and 'runtime' scopes) + msg "Generating dependency list using flattened pom..." + mvn dependency:list -f .flattened-pom.xml -DincludeScope=runtime -Dsort=true | grep '\[INFO] .*:.*:.*:.*:.*' >.new-list.txt + + # Compare two dependency lists + msg "Comparing dependency lists..." + diff .org-list.txt .new-list.txt >.diff.txt + if [[ $? == 0 ]] + then + msg "Success. No diff!" + else + msg "Diff found. See below: " + msg "You can also check .diff.txt file located in $1." + cat .diff.txt + return 1 + fi +} + +# Allow failures to continue running the script +set +e + +error_count=0 +for path in **/.flattened-pom.xml +do + # Check flattened pom in each dir that contains it for completeness + dir=$(dirname "$path") + pushd "$dir" + completenessCheck "$dir" + error_count=$(($error_count + $?)) + popd +done + +if [[ $error_count == 0 ]] +then + msg "All checks passed." + exit 0 +else + msg "Errors found. See log statements above." + exit 1 +fi diff --git a/.kokoro/nightly/common.cfg b/.kokoro/nightly/common.cfg new file mode 100644 index 0000000..eb24732 --- /dev/null +++ b/.kokoro/nightly/common.cfg @@ -0,0 +1,25 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + regex: "**/*sponge_log.txt" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "java-bigquerymigration/.kokoro/trampoline.sh" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/build.sh" +} + +env_vars: { + key: "JOB_TYPE" + value: "test" +} diff --git a/.kokoro/nightly/integration.cfg b/.kokoro/nightly/integration.cfg new file mode 100644 index 0000000..e51c7b4 --- /dev/null +++ b/.kokoro/nightly/integration.cfg @@ -0,0 +1,37 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java8" +} + +env_vars: { + key: "JOB_TYPE" + value: "integration" +} +# TODO: remove this after we've migrated all tests and scripts +env_vars: { + key: "GCLOUD_PROJECT" + value: "gcloud-devel" +} + +env_vars: { + key: "GOOGLE_CLOUD_PROJECT" + value: "gcloud-devel" +} + +env_vars: { + key: "ENABLE_FLAKYBOT" + value: "true" +} + +env_vars: { + key: "GOOGLE_APPLICATION_CREDENTIALS" + value: "secret_manager/java-it-service-account" +} + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "java-it-service-account" +} diff --git a/.kokoro/nightly/java11.cfg b/.kokoro/nightly/java11.cfg new file mode 100644 index 0000000..709f2b4 --- /dev/null +++ b/.kokoro/nightly/java11.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java11" +} diff --git a/.kokoro/nightly/java7.cfg b/.kokoro/nightly/java7.cfg new file mode 100644 index 0000000..cb24f44 --- /dev/null +++ b/.kokoro/nightly/java7.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java7" +} diff --git a/.kokoro/nightly/java8-osx.cfg b/.kokoro/nightly/java8-osx.cfg new file mode 100644 index 0000000..e05fbfe --- /dev/null +++ b/.kokoro/nightly/java8-osx.cfg @@ -0,0 +1,3 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +build_file: "java-bigquerymigration/.kokoro/build.sh" diff --git a/.kokoro/nightly/java8-win.cfg b/.kokoro/nightly/java8-win.cfg new file mode 100644 index 0000000..d4f3be4 --- /dev/null +++ b/.kokoro/nightly/java8-win.cfg @@ -0,0 +1,3 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +build_file: "java-bigquerymigration/.kokoro/build.bat" diff --git a/.kokoro/nightly/java8.cfg b/.kokoro/nightly/java8.cfg new file mode 100644 index 0000000..495cc7b --- /dev/null +++ b/.kokoro/nightly/java8.cfg @@ -0,0 +1,12 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java8" +} + +env_vars: { + key: "REPORT_COVERAGE" + value: "true" +} diff --git a/.kokoro/nightly/samples.cfg b/.kokoro/nightly/samples.cfg new file mode 100644 index 0000000..9761fd8 --- /dev/null +++ b/.kokoro/nightly/samples.cfg @@ -0,0 +1,38 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java8" +} + +env_vars: { + key: "JOB_TYPE" + value: "samples" +} + +# TODO: remove this after we've migrated all tests and scripts +env_vars: { + key: "GCLOUD_PROJECT" + value: "java-docs-samples-testing" +} + +env_vars: { + key: "GOOGLE_CLOUD_PROJECT" + value: "java-docs-samples-testing" +} + +env_vars: { + key: "GOOGLE_APPLICATION_CREDENTIALS" + value: "secret_manager/java-docs-samples-service-account" +} + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "java-docs-samples-service-account" +} + +env_vars: { + key: "ENABLE_FLAKYBOT" + value: "true" +} diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh new file mode 100755 index 0000000..f525142 --- /dev/null +++ b/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? == 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/.kokoro/presubmit/clirr.cfg b/.kokoro/presubmit/clirr.cfg new file mode 100644 index 0000000..ec57244 --- /dev/null +++ b/.kokoro/presubmit/clirr.cfg @@ -0,0 +1,13 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. + +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java8" +} + +env_vars: { + key: "JOB_TYPE" + value: "clirr" +} \ No newline at end of file diff --git a/.kokoro/presubmit/common.cfg b/.kokoro/presubmit/common.cfg new file mode 100644 index 0000000..4425a4a --- /dev/null +++ b/.kokoro/presubmit/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + regex: "**/*sponge_log.txt" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "java-bigquerymigration/.kokoro/trampoline.sh" + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/build.sh" +} + +env_vars: { + key: "JOB_TYPE" + value: "test" +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "dpebot_codecov_token" + } + } +} diff --git a/.kokoro/presubmit/dependencies.cfg b/.kokoro/presubmit/dependencies.cfg new file mode 100644 index 0000000..8dc082e --- /dev/null +++ b/.kokoro/presubmit/dependencies.cfg @@ -0,0 +1,12 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java8" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/dependencies.sh" +} diff --git a/.kokoro/presubmit/integration.cfg b/.kokoro/presubmit/integration.cfg new file mode 100644 index 0000000..dded67a --- /dev/null +++ b/.kokoro/presubmit/integration.cfg @@ -0,0 +1,33 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java8" +} + +env_vars: { + key: "JOB_TYPE" + value: "integration" +} + +# TODO: remove this after we've migrated all tests and scripts +env_vars: { + key: "GCLOUD_PROJECT" + value: "gcloud-devel" +} + +env_vars: { + key: "GOOGLE_CLOUD_PROJECT" + value: "gcloud-devel" +} + +env_vars: { + key: "GOOGLE_APPLICATION_CREDENTIALS" + value: "secret_manager/java-it-service-account" +} + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "java-it-service-account" +} diff --git a/.kokoro/presubmit/java11.cfg b/.kokoro/presubmit/java11.cfg new file mode 100644 index 0000000..709f2b4 --- /dev/null +++ b/.kokoro/presubmit/java11.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java11" +} diff --git a/.kokoro/presubmit/java7.cfg b/.kokoro/presubmit/java7.cfg new file mode 100644 index 0000000..cb24f44 --- /dev/null +++ b/.kokoro/presubmit/java7.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java7" +} diff --git a/.kokoro/presubmit/java8-osx.cfg b/.kokoro/presubmit/java8-osx.cfg new file mode 100644 index 0000000..e05fbfe --- /dev/null +++ b/.kokoro/presubmit/java8-osx.cfg @@ -0,0 +1,3 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +build_file: "java-bigquerymigration/.kokoro/build.sh" diff --git a/.kokoro/presubmit/java8-win.cfg b/.kokoro/presubmit/java8-win.cfg new file mode 100644 index 0000000..d4f3be4 --- /dev/null +++ b/.kokoro/presubmit/java8-win.cfg @@ -0,0 +1,3 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +build_file: "java-bigquerymigration/.kokoro/build.bat" diff --git a/.kokoro/presubmit/java8.cfg b/.kokoro/presubmit/java8.cfg new file mode 100644 index 0000000..495cc7b --- /dev/null +++ b/.kokoro/presubmit/java8.cfg @@ -0,0 +1,12 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java8" +} + +env_vars: { + key: "REPORT_COVERAGE" + value: "true" +} diff --git a/.kokoro/presubmit/linkage-monitor.cfg b/.kokoro/presubmit/linkage-monitor.cfg new file mode 100644 index 0000000..dc9a30d --- /dev/null +++ b/.kokoro/presubmit/linkage-monitor.cfg @@ -0,0 +1,12 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java8" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/linkage-monitor.sh" +} \ No newline at end of file diff --git a/.kokoro/presubmit/lint.cfg b/.kokoro/presubmit/lint.cfg new file mode 100644 index 0000000..6d323c8 --- /dev/null +++ b/.kokoro/presubmit/lint.cfg @@ -0,0 +1,13 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. 
+ +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java8" +} + +env_vars: { + key: "JOB_TYPE" + value: "lint" +} \ No newline at end of file diff --git a/.kokoro/presubmit/samples.cfg b/.kokoro/presubmit/samples.cfg new file mode 100644 index 0000000..01e0960 --- /dev/null +++ b/.kokoro/presubmit/samples.cfg @@ -0,0 +1,33 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java8" +} + +env_vars: { + key: "JOB_TYPE" + value: "samples" +} + +# TODO: remove this after we've migrated all tests and scripts +env_vars: { + key: "GCLOUD_PROJECT" + value: "java-docs-samples-testing" +} + +env_vars: { + key: "GOOGLE_CLOUD_PROJECT" + value: "java-docs-samples-testing" +} + +env_vars: { + key: "GOOGLE_APPLICATION_CREDENTIALS" + value: "secret_manager/java-docs-samples-service-account" +} + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "java-docs-samples-service-account" +} \ No newline at end of file diff --git a/.kokoro/readme.sh b/.kokoro/readme.sh new file mode 100755 index 0000000..7a9b1ee --- /dev/null +++ b/.kokoro/readme.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +cd ${KOKORO_ARTIFACTS_DIR}/github/java-bigquerymigration + +# Disable buffering, so that the logs stream through. 
+export PYTHONUNBUFFERED=1 + +# Kokoro exposes this as a file, but the scripts expect just a plain variable. +export GITHUB_TOKEN=$(cat ${KOKORO_KEYSTORE_DIR}/73713_yoshi-automation-github-key) + +# Setup git credentials +echo "https://${GITHUB_TOKEN}:@github.com" >> ~/.git-credentials +git config --global credential.helper 'store --file ~/.git-credentials' + +python3.6 -m pip install git+https://github.com/googleapis/synthtool.git#egg=gcp-synthtool + +set +e +python3.6 -m autosynth.synth \ + --repository=googleapis/java-bigquerymigration \ + --synth-file-name=.github/readme/synth.py \ + --metadata-path=.github/readme/synth.metadata \ + --pr-title="chore: regenerate README" \ + --branch-suffix="readme" + +# autosynth returns 28 to signal there are no changes +RETURN_CODE=$? +if [[ ${RETURN_CODE} -ne 0 && ${RETURN_CODE} -ne 28 ]] +then + exit ${RETURN_CODE} +fi diff --git a/.kokoro/release/bump_snapshot.cfg b/.kokoro/release/bump_snapshot.cfg new file mode 100644 index 0000000..8f37609 --- /dev/null +++ b/.kokoro/release/bump_snapshot.cfg @@ -0,0 +1,53 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "java-bigquerymigration/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/release/bump_snapshot.sh" +} + +# tokens used by release-please to keep an up-to-date release PR. 
+before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "github-magic-proxy-key-release-please" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "github-magic-proxy-token-release-please" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "github-magic-proxy-url-release-please" + } + } +} diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg new file mode 100644 index 0000000..a4d2068 --- /dev/null +++ b/.kokoro/release/common.cfg @@ -0,0 +1,49 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "java-bigquerymigration/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java8" +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 70247 + keyname: "maven-gpg-keyring" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 70247 + keyname: "maven-gpg-passphrase" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 70247 + keyname: "maven-gpg-pubkeyring" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 70247 + keyname: "sonatype-credentials" + } + } +} diff --git a/.kokoro/release/common.sh b/.kokoro/release/common.sh new file mode 100755 index 0000000..7f78ee4 --- /dev/null +++ b/.kokoro/release/common.sh @@ -0,0 +1,50 @@ +#!/bin/bash +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# Get secrets from keystore and set and environment variables +setup_environment_secrets() { + export GPG_PASSPHRASE=$(cat ${KOKORO_KEYSTORE_DIR}/70247_maven-gpg-passphrase) + export GPG_TTY=$(tty) + export GPG_HOMEDIR=/gpg + mkdir $GPG_HOMEDIR + mv ${KOKORO_KEYSTORE_DIR}/70247_maven-gpg-pubkeyring $GPG_HOMEDIR/pubring.gpg + mv ${KOKORO_KEYSTORE_DIR}/70247_maven-gpg-keyring $GPG_HOMEDIR/secring.gpg + export SONATYPE_USERNAME=$(cat ${KOKORO_KEYSTORE_DIR}/70247_sonatype-credentials | cut -f1 -d'|') + export SONATYPE_PASSWORD=$(cat ${KOKORO_KEYSTORE_DIR}/70247_sonatype-credentials | cut -f2 -d'|') +} + +create_settings_xml_file() { + echo " + + + ossrh + ${SONATYPE_USERNAME} + ${SONATYPE_PASSWORD} + + + sonatype-nexus-staging + ${SONATYPE_USERNAME} + ${SONATYPE_PASSWORD} + + + sonatype-nexus-snapshots + ${SONATYPE_USERNAME} + ${SONATYPE_PASSWORD} + + +" > $1 +} \ No newline at end of file diff --git a/.kokoro/release/drop.cfg b/.kokoro/release/drop.cfg new file mode 100644 index 0000000..d9e97c0 --- /dev/null +++ b/.kokoro/release/drop.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/release/drop.sh" +} diff --git a/.kokoro/release/drop.sh b/.kokoro/release/drop.sh new file mode 100755 index 0000000..742ec1a --- /dev/null +++ b/.kokoro/release/drop.sh @@ -0,0 +1,32 @@ +#!/bin/bash +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# STAGING_REPOSITORY_ID must be set +if [ -z "${STAGING_REPOSITORY_ID}" ]; then + echo "Missing STAGING_REPOSITORY_ID environment variable" + exit 1 +fi + +source $(dirname "$0")/common.sh +pushd $(dirname "$0")/../../ + +setup_environment_secrets +create_settings_xml_file "settings.xml" + +mvn nexus-staging:drop -B \ + --settings=settings.xml \ + -DstagingRepositoryId=${STAGING_REPOSITORY_ID} diff --git a/.kokoro/release/promote.cfg b/.kokoro/release/promote.cfg new file mode 100644 index 0000000..5b6fba7 --- /dev/null +++ b/.kokoro/release/promote.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/release/promote.sh" +} diff --git a/.kokoro/release/promote.sh b/.kokoro/release/promote.sh new file mode 100755 index 0000000..3cac3d8 --- /dev/null +++ b/.kokoro/release/promote.sh @@ -0,0 +1,34 @@ +#!/bin/bash +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -eo pipefail + +# STAGING_REPOSITORY_ID must be set +if [ -z "${STAGING_REPOSITORY_ID}" ]; then + echo "Missing STAGING_REPOSITORY_ID environment variable" + exit 1 +fi + +source $(dirname "$0")/common.sh + +pushd $(dirname "$0")/../../ + +setup_environment_secrets +create_settings_xml_file "settings.xml" + +mvn nexus-staging:release -B \ + -DperformRelease=true \ + --settings=settings.xml \ + -DstagingRepositoryId=${STAGING_REPOSITORY_ID} diff --git a/.kokoro/release/publish_javadoc.cfg b/.kokoro/release/publish_javadoc.cfg new file mode 100644 index 0000000..93664a9 --- /dev/null +++ b/.kokoro/release/publish_javadoc.cfg @@ -0,0 +1,23 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/doc-templates/" + +env_vars: { + key: "STAGING_BUCKET" + value: "docs-staging" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/release/publish_javadoc.sh" +} + + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} diff --git a/.kokoro/release/publish_javadoc.sh b/.kokoro/release/publish_javadoc.sh new file mode 100755 index 0000000..f194cd7 --- /dev/null +++ b/.kokoro/release/publish_javadoc.sh @@ -0,0 +1,53 @@ +#!/bin/bash +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -eo pipefail + +if [[ -z "${CREDENTIALS}" ]]; then + CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account +fi + +if [[ -z "${STAGING_BUCKET}" ]]; then + echo "Need to set STAGING_BUCKET environment variable" + exit 1 +fi + +# work from the git root directory +pushd $(dirname "$0")/../../ + +# install docuploader package +python3 -m pip install gcp-docuploader + +# compile all packages +mvn clean install -B -q -DskipTests=true + +export NAME=google-cloud-bigquerymigration +export VERSION=$(grep ${NAME}: versions.txt | cut -d: -f3) + +# build the docs +mvn site -B -q + +pushd target/site/apidocs + +# create metadata +python3 -m docuploader create-metadata \ + --name ${NAME} \ + --version ${VERSION} \ + --language java + +# upload docs +python3 -m docuploader upload . \ + --credentials ${CREDENTIALS} \ + --staging-bucket ${STAGING_BUCKET} diff --git a/.kokoro/release/publish_javadoc11.cfg b/.kokoro/release/publish_javadoc11.cfg new file mode 100644 index 0000000..771afff --- /dev/null +++ b/.kokoro/release/publish_javadoc11.cfg @@ -0,0 +1,30 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# cloud-rad production +env_vars: { + key: "STAGING_BUCKET_V2" + value: "docs-staging-v2" +} + +# Configure the docker image for kokoro-trampoline +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/java11" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/release/publish_javadoc11.sh" +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} + +# Downloads docfx doclet resource. 
This will be in ${KOKORO_GFILE_DIR}/ +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/docfx" diff --git a/.kokoro/release/publish_javadoc11.sh b/.kokoro/release/publish_javadoc11.sh new file mode 100755 index 0000000..c20b536 --- /dev/null +++ b/.kokoro/release/publish_javadoc11.sh @@ -0,0 +1,67 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +if [[ -z "${CREDENTIALS}" ]]; then + CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account +fi + +if [[ -z "${STAGING_BUCKET_V2}" ]]; then + echo "Need to set STAGING_BUCKET_V2 environment variable" + exit 1 +fi + +# work from the git root directory +pushd $(dirname "$0")/../../ + +# install docuploader package +python3 -m pip install gcp-docuploader + +# compile all packages +mvn clean install -B -q -DskipTests=true + +export NAME=google-cloud-bigquerymigration +export VERSION=$(grep ${NAME}: versions.txt | cut -d: -f3) + +# V3 generates docfx yml from javadoc +# generate yml +mvn clean site -B -q -P docFX + +# copy README to docfx-yml dir and rename index.md +cp README.md target/docfx-yml/index.md +# copy CHANGELOG to docfx-yml dir and rename history.md +cp CHANGELOG.md target/docfx-yml/history.md + +pushd target/docfx-yml + +# create metadata +python3 -m docuploader create-metadata \ + --name ${NAME} \ + --version ${VERSION} \ + --xrefs devsite://java/gax \ + --xrefs devsite://java/google-cloud-core \ + --xrefs devsite://java/api-common 
\ + --xrefs devsite://java/proto-google-common-protos \ + --xrefs devsite://java/google-api-client \ + --xrefs devsite://java/google-http-client \ + --xrefs devsite://java/protobuf \ + --language java + +# upload yml to production bucket +python3 -m docuploader upload . \ + --credentials ${CREDENTIALS} \ + --staging-bucket ${STAGING_BUCKET_V2} \ + --destination-prefix docfx diff --git a/.kokoro/release/snapshot.cfg b/.kokoro/release/snapshot.cfg new file mode 100644 index 0000000..818b947 --- /dev/null +++ b/.kokoro/release/snapshot.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/release/snapshot.sh" +} \ No newline at end of file diff --git a/.kokoro/release/snapshot.sh b/.kokoro/release/snapshot.sh new file mode 100755 index 0000000..1f55b77 --- /dev/null +++ b/.kokoro/release/snapshot.sh @@ -0,0 +1,33 @@ +#!/bin/bash +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -eo pipefail + +source $(dirname "$0")/common.sh +MAVEN_SETTINGS_FILE=$(realpath $(dirname "$0")/../../)/settings.xml +pushd $(dirname "$0")/../../ + +# ensure we're trying to push a snapshot (no-result returns non-zero exit code) +grep SNAPSHOT versions.txt + +setup_environment_secrets +create_settings_xml_file "settings.xml" + +mvn clean deploy -B \ + --settings ${MAVEN_SETTINGS_FILE} \ + -DperformRelease=true \ + -Dgpg.executable=gpg \ + -Dgpg.passphrase=${GPG_PASSPHRASE} \ + -Dgpg.homedir=${GPG_HOMEDIR} diff --git a/.kokoro/release/stage.cfg b/.kokoro/release/stage.cfg new file mode 100644 index 0000000..6750651 --- /dev/null +++ b/.kokoro/release/stage.cfg @@ -0,0 +1,19 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerymigration/.kokoro/release/stage.sh" +} + +# Need to save the properties file +action { + define_artifacts { + regex: "github/java-bigquerymigration/target/nexus-staging/staging/*.properties" + strip_prefix: "github/java-bigquerymigration" + } +} + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} diff --git a/.kokoro/release/stage.sh b/.kokoro/release/stage.sh new file mode 100755 index 0000000..77dc4e8 --- /dev/null +++ b/.kokoro/release/stage.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# Start the releasetool reporter +python3 -m pip install gcp-releasetool +python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script + +source $(dirname "$0")/common.sh +source $(dirname "$0")/../common.sh +MAVEN_SETTINGS_FILE=$(realpath $(dirname "$0")/../../)/settings.xml +pushd $(dirname "$0")/../../ + +setup_environment_secrets +create_settings_xml_file "settings.xml" + +# attempt to stage 3 times with exponential backoff (starting with 10 seconds) +retry_with_backoff 3 10 \ + mvn clean deploy -B \ + --settings ${MAVEN_SETTINGS_FILE} \ + -DskipTests=true \ + -DperformRelease=true \ + -Dgpg.executable=gpg \ + -Dgpg.passphrase=${GPG_PASSPHRASE} \ + -Dgpg.homedir=${GPG_HOMEDIR} + +if [[ -n "${AUTORELEASE_PR}" ]] +then + mvn nexus-staging:release -B \ + -DperformRelease=true \ + --settings=settings.xml +fi \ No newline at end of file diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh new file mode 100644 index 0000000..8b69b79 --- /dev/null +++ b/.kokoro/trampoline.sh @@ -0,0 +1,26 @@ +#!/bin/bash +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +set -eo pipefail +# Always run the cleanup script, regardless of the success of bouncing into +# the container. 
+function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT + +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" diff --git a/.repo-metadata.json b/.repo-metadata.json new file mode 100644 index 0000000..d393c82 --- /dev/null +++ b/.repo-metadata.json @@ -0,0 +1,16 @@ +{ + "name": "bigquerymigration", + "name_pretty": "BigQuery Migration", + "product_documentation": "https://cloud.google.com/bigquery/docs", + "api_description": "BigQuery Migration API", + "client_documentation": "https://googleapis.dev/java/google-cloud-bigquerymigration/latest/index.html", + "release_level": "alpha", + "transport": "grpc", + "language": "java", + "repo": "googleapis/java-bigquerymigration", + "repo_short": "java-bigquerymigration", + "distribution_name": "com.google.cloud:google-cloud-bigquerymigration", + "api_id": "bigquerymigration.googleapis.com", + "library_type": "GAPIC_AUTO", + "requires_billing": true +} \ No newline at end of file diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..2add254 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,94 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..f2dbdee --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,139 @@ +# How to Contribute
+
+We'd love to accept your patches and contributions to this project. There are
+just a few small guidelines you need to follow.
+
+## Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution;
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
+## Code reviews
+
+All submissions, including submissions by project members, require review. We
+use GitHub pull requests for this purpose. Consult
+[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
+information on using pull requests.
+
+## Community Guidelines
+
+This project follows
+[Google's Open Source Community Guidelines](https://opensource.google.com/conduct/).
+
+## Building the project
+
+To build, package, and run all unit tests run the command
+
+```
+mvn clean verify
+```
+
+### Running Integration tests
+
+To include integration tests when building the project, you need access to
+a GCP Project with a valid service account.
+
+For instructions on how to generate a service account and corresponding
+credentials JSON see: [Creating a Service Account][1]. 
+ +Then run the following to build, package, run all unit tests and run all +integration tests. + +```bash +export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service/account.json +mvn -Penable-integration-tests clean verify +``` + +## Code Samples + +Code Samples must be bundled in separate Maven modules, and guarded by a +Maven profile with the name `enable-samples`. + +The samples must be separate from the primary project for a few reasons: +1. Primary projects have a minimum Java version of Java 7 whereas samples have + a minimum Java version of Java 8. Due to this we need the ability to + selectively exclude samples from a build run. +2. Many code samples depend on external GCP services and need + credentials to access the service. +3. Code samples are not released as Maven artifacts and must be excluded from + release builds. + +### Building + +```bash +mvn -Penable-samples clean verify +``` + +Some samples require access to GCP services and require a service account: + +```bash +export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service/account.json +mvn -Penable-samples clean verify +``` + +### Profile Config + +1. To add samples in a profile to your Maven project, add the following to your +`pom.xml` + + ```xml + + [...] + + + enable-samples + + sample + + + + [...] + + ``` + +2. [Activate](#profile-activation) the profile. +3. Define your samples in a normal Maven project in the `samples/` directory. + +### Code Formatting + +Code in this repo is formatted with +[google-java-format](https://github.com/google/google-java-format). +To run formatting on your project, you can run: +``` +mvn com.coveo:fmt-maven-plugin:format +``` + +### Profile Activation + +To include code samples when building and testing the project, enable the +`enable-samples` Maven profile. + +#### Command line + +To activate the Maven profile on the command line add `-Penable-samples` to your +Maven command. 
+ +#### Maven `settings.xml` + +To activate the Maven profile in your `~/.m2/settings.xml` add an entry of +`enable-samples` following the instructions in [Active Profiles][2]. + +This method has the benefit of applying to all projects you build (and is +respected by IntelliJ IDEA) and is recommended if you are going to be +contributing samples to several projects. + +#### IntelliJ IDEA + +To activate the Maven Profile inside IntelliJ IDEA, follow the instructions in +[Activate Maven profiles][3] to activate `enable-samples`. + +[1]: https://cloud.google.com/docs/authentication/getting-started#creating_a_service_account +[2]: https://maven.apache.org/settings.html#Active_Profiles +[3]: https://www.jetbrains.com/help/idea/work-with-maven-profiles.html#activate_maven_profiles diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md new file mode 100644 index 0000000..af3016c --- /dev/null +++ b/README.md @@ -0,0 +1,202 @@ +# Google BigQuery Migration Client for Java + +Java idiomatic client for [BigQuery Migration][product-docs]. + +[![Maven][maven-version-image]][maven-version-link] +![Stability][stability-image] + +- [Product Documentation][product-docs] +- [Client Library Documentation][javadocs] + +> Note: This client is a work-in-progress, and may occasionally +> make backwards-incompatible changes. + + +## Quickstart + + +If you are using Maven, add this to your pom.xml file: + + +```xml + + com.google.cloud + google-cloud-bigquerymigration + 0.0.0 + +``` + +If you are using Gradle without BOM, add this to your dependencies + +```Groovy +implementation 'com.google.cloud:google-cloud-bigquerymigration:0.0.0' +``` + +If you are using SBT, add this to your dependencies + +```Scala +libraryDependencies += "com.google.cloud" % "google-cloud-bigquerymigration" % "0.0.0" +``` + +## Authentication + +See the [Authentication][authentication] section in the base directory's README. 
+ +## Authorization + +The client application making API calls must be granted [authorization scopes][auth-scopes] required for the desired BigQuery Migration APIs, and the authenticated principal must have the [IAM role(s)][predefined-iam-roles] required to access GCP resources using the BigQuery Migration API calls. + +## Getting Started + +### Prerequisites + +You will need a [Google Cloud Platform Console][developer-console] project with the BigQuery Migration [API enabled][enable-api]. +You will need to [enable billing][enable-billing] to use Google BigQuery Migration. +[Follow these instructions][create-project] to get your project set up. You will also need to set up the local development environment by +[installing the Google Cloud SDK][cloud-sdk] and running the following commands in command line: +`gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`. + +### Installation and setup + +You'll need to obtain the `google-cloud-bigquerymigration` library. See the [Quickstart](#quickstart) section +to add `google-cloud-bigquerymigration` as a dependency in your code. + +## About BigQuery Migration + + +[BigQuery Migration][product-docs] BigQuery Migration API + +See the [BigQuery Migration client library docs][javadocs] to learn how to +use this BigQuery Migration Client Library. + + + + + + +## Troubleshooting + +To get help, follow the instructions in the [shared Troubleshooting document][troubleshooting]. + +## Transport + +BigQuery Migration uses gRPC for the transport layer. + +## Supported Java Versions + +Java 7 or above is required for using this client. + +Google's Java client libraries, +[Google Cloud Client Libraries][cloudlibs] +and +[Google Cloud API Libraries][apilibs], +follow the +[Oracle Java SE support roadmap][oracle] +(see the Oracle Java SE Product Releases section). 
+ +### For new development + +In general, new feature development occurs with support for the lowest Java +LTS version covered by Oracle's Premier Support (which typically lasts 5 years +from initial General Availability). If the minimum required JVM for a given +library is changed, it is accompanied by a [semver][semver] major release. + +Java 11 and (in September 2021) Java 17 are the best choices for new +development. + +### Keeping production systems current + +Google tests its client libraries with all current LTS versions covered by +Oracle's Extended Support (which typically lasts 8 years from initial +General Availability). + +#### Legacy support + +Google's client libraries support legacy versions of Java runtimes with long +term stable libraries that don't receive feature updates on a best efforts basis +as it may not be possible to backport all patches. + +Google provides updates on a best efforts basis to apps that continue to use +Java 7, though apps might need to upgrade to current versions of the library +that supports their JVM. + +#### Where to find specific information + +The latest versions and the supported Java versions are identified on +the individual GitHub repository `github.com/GoogleAPIs/java-SERVICENAME` +and on [google-cloud-java][g-c-j]. + +## Versioning + + +This library follows [Semantic Versioning](http://semver.org/). + + +It is currently in major version zero (``0.y.z``), which means that anything may change at any time +and the public API should not be considered stable. + + +## Contributing + + +Contributions to this library are always welcome and highly encouraged. + +See [CONTRIBUTING][contributing] for more information how to get started. + +Please note that this project is released with a Contributor Code of Conduct. By participating in +this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more +information. + + +## License + +Apache 2.0 - See [LICENSE][license] for more information. 
+ +## CI Status + +Java Version | Status +------------ | ------ +Java 7 | [![Kokoro CI][kokoro-badge-image-1]][kokoro-badge-link-1] +Java 8 | [![Kokoro CI][kokoro-badge-image-2]][kokoro-badge-link-2] +Java 8 OSX | [![Kokoro CI][kokoro-badge-image-3]][kokoro-badge-link-3] +Java 8 Windows | [![Kokoro CI][kokoro-badge-image-4]][kokoro-badge-link-4] +Java 11 | [![Kokoro CI][kokoro-badge-image-5]][kokoro-badge-link-5] + +Java is a registered trademark of Oracle and/or its affiliates. + +[product-docs]: https://cloud.google.com/bigquery/docs +[javadocs]: https://googleapis.dev/java/google-cloud-bigquerymigration/latest/index.html +[kokoro-badge-image-1]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquerymigration/java7.svg +[kokoro-badge-link-1]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquerymigration/java7.html +[kokoro-badge-image-2]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquerymigration/java8.svg +[kokoro-badge-link-2]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquerymigration/java8.html +[kokoro-badge-image-3]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquerymigration/java8-osx.svg +[kokoro-badge-link-3]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquerymigration/java8-osx.html +[kokoro-badge-image-4]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquerymigration/java8-win.svg +[kokoro-badge-link-4]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquerymigration/java8-win.html +[kokoro-badge-image-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquerymigration/java11.svg +[kokoro-badge-link-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquerymigration/java11.html +[stability-image]: https://img.shields.io/badge/stability-alpha-orange +[maven-version-image]: 
https://img.shields.io/maven-central/v/com.google.cloud/google-cloud-bigquerymigration.svg +[maven-version-link]: https://search.maven.org/search?q=g:com.google.cloud%20AND%20a:google-cloud-bigquerymigration&core=gav +[authentication]: https://github.com/googleapis/google-cloud-java#authentication +[auth-scopes]: https://developers.google.com/identity/protocols/oauth2/scopes +[predefined-iam-roles]: https://cloud.google.com/iam/docs/understanding-roles#predefined_roles +[iam-policy]: https://cloud.google.com/iam/docs/overview#cloud-iam-policy +[developer-console]: https://console.developers.google.com/ +[create-project]: https://cloud.google.com/resource-manager/docs/creating-managing-projects +[cloud-sdk]: https://cloud.google.com/sdk/ +[troubleshooting]: https://github.com/googleapis/google-cloud-common/blob/master/troubleshooting/readme.md#troubleshooting +[contributing]: https://github.com/googleapis/java-bigquerymigration/blob/master/CONTRIBUTING.md +[code-of-conduct]: https://github.com/googleapis/java-bigquerymigration/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct +[license]: https://github.com/googleapis/java-bigquerymigration/blob/master/LICENSE +[enable-billing]: https://cloud.google.com/apis/docs/getting-started#enabling_billing +[enable-api]: https://console.cloud.google.com/flows/enableapi?apiid=bigquerymigration.googleapis.com +[libraries-bom]: https://github.com/GoogleCloudPlatform/cloud-opensource-java/wiki/The-Google-Cloud-Platform-Libraries-BOM +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png + +[semver]: https://semver.org/ +[cloudlibs]: https://cloud.google.com/apis/docs/client-libraries-explained +[apilibs]: https://cloud.google.com/apis/docs/client-libraries-explained#google_api_client_libraries +[oracle]: https://www.oracle.com/java/technologies/java-se-support-roadmap.html +[g-c-j]: http://github.com/googleapis/google-cloud-java diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..8b58ae9 --- 
/dev/null +++ b/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. diff --git a/codecov.yaml b/codecov.yaml new file mode 100644 index 0000000..5724ea9 --- /dev/null +++ b/codecov.yaml @@ -0,0 +1,4 @@ +--- +codecov: + ci: + - source.cloud.google.com diff --git a/google-cloud-bigquerymigration-bom/pom.xml b/google-cloud-bigquerymigration-bom/pom.xml new file mode 100644 index 0000000..b5f58e1 --- /dev/null +++ b/google-cloud-bigquerymigration-bom/pom.xml @@ -0,0 +1,94 @@ + + + 4.0.0 + com.google.cloud + google-cloud-bigquerymigration-bom + 0.0.1-SNAPSHOT + pom + + com.google.cloud + google-cloud-shared-config + 1.0.0 + + + Google BigQuery Migration BOM + https://github.com/googleapis/java-bigquerymigration + + BOM for BigQuery Migration + + + + Google LLC + + + + + chingor13 + Jeff Ching + chingor@google.com + Google LLC + + Developer + + + + neenushaji + Neenu Shaji + neenushaji@google.com + Google LLC + + Developer + + + + + + scm:git:https://github.com/googleapis/java-bigquerymigration.git + scm:git:git@github.com:googleapis/java-bigquerymigration.git + https://github.com/googleapis/java-bigquerymigration + + + + true + + + + + The Apache Software License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + + + + com.google.cloud + google-cloud-bigquerymigration + 0.0.1-SNAPSHOT + + + com.google.api.grpc + grpc-google-cloud-bigquerymigration-v2alpha + 0.0.1-SNAPSHOT + + + com.google.api.grpc + proto-google-cloud-bigquerymigration-v2alpha + 0.0.1-SNAPSHOT + + + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + + true + + + + + diff --git a/google-cloud-bigquerymigration/pom.xml b/google-cloud-bigquerymigration/pom.xml 
new file mode 100644 index 0000000..3bddbb5 --- /dev/null +++ b/google-cloud-bigquerymigration/pom.xml @@ -0,0 +1,111 @@ + + + 4.0.0 + com.google.cloud + google-cloud-bigquerymigration + 0.0.1-SNAPSHOT + jar + Google BigQuery Migration + https://github.com/googleapis/java-bigquerymigration + BigQuery Migration BigQuery Migration API + + com.google.cloud + google-cloud-bigquerymigration-parent + 0.0.1-SNAPSHOT + + + google-cloud-bigquerymigration + + + + io.grpc + grpc-api + + + io.grpc + grpc-stub + + + io.grpc + grpc-protobuf + + + com.google.api + api-common + + + com.google.protobuf + protobuf-java + + + com.google.api.grpc + proto-google-common-protos + + + + com.google.api.grpc + proto-google-cloud-bigquerymigration-v2alpha + + + com.google.guava + guava + + + com.google.api + gax + + + com.google.api + gax-grpc + + + org.threeten + threetenbp + + + + + junit + junit + test + 4.13.2 + + + + com.google.api.grpc + grpc-google-cloud-bigquerymigration-v2alpha + test + + + + com.google.api + gax-grpc + testlib + test + + + + + + java9 + + [9,) + + + + javax.annotation + javax.annotation-api + + + + + + + + + org.codehaus.mojo + flatten-maven-plugin + + + + \ No newline at end of file diff --git a/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationServiceClient.java b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationServiceClient.java new file mode 100644 index 0000000..0182596 --- /dev/null +++ b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationServiceClient.java @@ -0,0 +1,1173 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.migration.v2alpha; + +import com.google.api.core.ApiFuture; +import com.google.api.core.ApiFutures; +import com.google.api.core.BetaApi; +import com.google.api.gax.core.BackgroundResource; +import com.google.api.gax.paging.AbstractFixedSizeCollection; +import com.google.api.gax.paging.AbstractPage; +import com.google.api.gax.paging.AbstractPagedListResponse; +import com.google.api.gax.rpc.PageContext; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.bigquery.migration.v2alpha.stub.MigrationServiceStub; +import com.google.cloud.bigquery.migration.v2alpha.stub.MigrationServiceStubSettings; +import com.google.common.util.concurrent.MoreExecutors; +import com.google.protobuf.Empty; +import java.io.IOException; +import java.util.List; +import java.util.concurrent.TimeUnit; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * Service Description: Service to handle EDW migrations. + * + *

This class provides the ability to make remote calls to the backing service through method + * calls that map to API methods. Sample code to get started: + * + *

{@code
+ * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+ *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+ *   MigrationWorkflow migrationWorkflow = MigrationWorkflow.newBuilder().build();
+ *   MigrationWorkflow response =
+ *       migrationServiceClient.createMigrationWorkflow(parent, migrationWorkflow);
+ * }
+ * }
+ * + *

Note: close() needs to be called on the MigrationServiceClient object to clean up resources + * such as threads. In the example above, try-with-resources is used, which automatically calls + * close(). + * + *

The surface of this class includes several types of Java methods for each of the API's + * methods: + * + *

    + *
  1. A "flattened" method. With this type of method, the fields of the request type have been + * converted into function parameters. It may be the case that not all fields are available as + * parameters, and not every API method will have a flattened method entry point. + *
  2. A "request object" method. This type of method only takes one parameter, a request object, + * which must be constructed before the call. Not every API method will have a request object + * method. + *
  3. A "callable" method. This type of method takes no parameters and returns an immutable API + * callable object, which can be used to initiate calls to the service. + *
+ * + *

See the individual methods for example code. + * + *

Many parameters require resource names to be formatted in a particular way. To assist with + * these names, this class includes a format method for each type of name, and additionally a parse + * method to extract the individual identifiers contained within names that are returned. + * + *

This class can be customized by passing in a custom instance of MigrationServiceSettings to + * create(). For example: + * + *

To customize credentials: + * + *

{@code
+ * MigrationServiceSettings migrationServiceSettings =
+ *     MigrationServiceSettings.newBuilder()
+ *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
+ *         .build();
+ * MigrationServiceClient migrationServiceClient =
+ *     MigrationServiceClient.create(migrationServiceSettings);
+ * }
+ * + *

To customize the endpoint: + * + *

{@code
+ * MigrationServiceSettings migrationServiceSettings =
+ *     MigrationServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
+ * MigrationServiceClient migrationServiceClient =
+ *     MigrationServiceClient.create(migrationServiceSettings);
+ * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. + */ +@BetaApi +@Generated("by gapic-generator-java") +public class MigrationServiceClient implements BackgroundResource { + private final MigrationServiceSettings settings; + private final MigrationServiceStub stub; + + /** Constructs an instance of MigrationServiceClient with default settings. */ + public static final MigrationServiceClient create() throws IOException { + return create(MigrationServiceSettings.newBuilder().build()); + } + + /** + * Constructs an instance of MigrationServiceClient, using the given settings. The channels are + * created based on the settings passed in, or defaults for any settings that are not set. + */ + public static final MigrationServiceClient create(MigrationServiceSettings settings) + throws IOException { + return new MigrationServiceClient(settings); + } + + /** + * Constructs an instance of MigrationServiceClient, using the given stub for making calls. This + * is for advanced usage - prefer using create(MigrationServiceSettings). + */ + @BetaApi("A restructuring of stub classes is planned, so this may break in the future") + public static final MigrationServiceClient create(MigrationServiceStub stub) { + return new MigrationServiceClient(stub); + } + + /** + * Constructs an instance of MigrationServiceClient, using the given settings. This is protected + * so that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. 
+ */ + protected MigrationServiceClient(MigrationServiceSettings settings) throws IOException { + this.settings = settings; + this.stub = ((MigrationServiceStubSettings) settings.getStubSettings()).createStub(); + } + + @BetaApi("A restructuring of stub classes is planned, so this may break in the future") + protected MigrationServiceClient(MigrationServiceStub stub) { + this.settings = null; + this.stub = stub; + } + + public final MigrationServiceSettings getSettings() { + return settings; + } + + @BetaApi("A restructuring of stub classes is planned, so this may break in the future") + public MigrationServiceStub getStub() { + return stub; + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   MigrationWorkflow migrationWorkflow = MigrationWorkflow.newBuilder().build();
+   *   MigrationWorkflow response =
+   *       migrationServiceClient.createMigrationWorkflow(parent, migrationWorkflow);
+   * }
+   * }
+ * + * @param parent Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param migrationWorkflow Required. The migration workflow to create. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final MigrationWorkflow createMigrationWorkflow( + LocationName parent, MigrationWorkflow migrationWorkflow) { + CreateMigrationWorkflowRequest request = + CreateMigrationWorkflowRequest.newBuilder() + .setParent(parent == null ? null : parent.toString()) + .setMigrationWorkflow(migrationWorkflow) + .build(); + return createMigrationWorkflow(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   MigrationWorkflow migrationWorkflow = MigrationWorkflow.newBuilder().build();
+   *   MigrationWorkflow response =
+   *       migrationServiceClient.createMigrationWorkflow(parent, migrationWorkflow);
+   * }
+   * }
+ * + * @param parent Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param migrationWorkflow Required. The migration workflow to create. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final MigrationWorkflow createMigrationWorkflow( + String parent, MigrationWorkflow migrationWorkflow) { + CreateMigrationWorkflowRequest request = + CreateMigrationWorkflowRequest.newBuilder() + .setParent(parent) + .setMigrationWorkflow(migrationWorkflow) + .build(); + return createMigrationWorkflow(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   CreateMigrationWorkflowRequest request =
+   *       CreateMigrationWorkflowRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setMigrationWorkflow(MigrationWorkflow.newBuilder().build())
+   *           .build();
+   *   MigrationWorkflow response = migrationServiceClient.createMigrationWorkflow(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final MigrationWorkflow createMigrationWorkflow(CreateMigrationWorkflowRequest request) { + return createMigrationWorkflowCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   CreateMigrationWorkflowRequest request =
+   *       CreateMigrationWorkflowRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setMigrationWorkflow(MigrationWorkflow.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       migrationServiceClient.createMigrationWorkflowCallable().futureCall(request);
+   *   // Do something.
+   *   MigrationWorkflow response = future.get();
+   * }
+   * }
+ */ + public final UnaryCallable + createMigrationWorkflowCallable() { + return stub.createMigrationWorkflowCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets a previously created migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   MigrationWorkflowName name =
+   *       MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]");
+   *   MigrationWorkflow response = migrationServiceClient.getMigrationWorkflow(name);
+   * }
+   * }
+ * + * @param name Required. The unique identifier for the migration workflow. Example: + * `projects/123/locations/us/workflows/1234` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final MigrationWorkflow getMigrationWorkflow(MigrationWorkflowName name) { + GetMigrationWorkflowRequest request = + GetMigrationWorkflowRequest.newBuilder() + .setName(name == null ? null : name.toString()) + .build(); + return getMigrationWorkflow(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets a previously created migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   String name = MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString();
+   *   MigrationWorkflow response = migrationServiceClient.getMigrationWorkflow(name);
+   * }
+   * }
+ * + * @param name Required. The unique identifier for the migration workflow. Example: + * `projects/123/locations/us/workflows/1234` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final MigrationWorkflow getMigrationWorkflow(String name) { + GetMigrationWorkflowRequest request = + GetMigrationWorkflowRequest.newBuilder().setName(name).build(); + return getMigrationWorkflow(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets a previously created migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   GetMigrationWorkflowRequest request =
+   *       GetMigrationWorkflowRequest.newBuilder()
+   *           .setName(MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   MigrationWorkflow response = migrationServiceClient.getMigrationWorkflow(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final MigrationWorkflow getMigrationWorkflow(GetMigrationWorkflowRequest request) { + return getMigrationWorkflowCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets a previously created migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   GetMigrationWorkflowRequest request =
+   *       GetMigrationWorkflowRequest.newBuilder()
+   *           .setName(MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       migrationServiceClient.getMigrationWorkflowCallable().futureCall(request);
+   *   // Do something.
+   *   MigrationWorkflow response = future.get();
+   * }
+   * }
+ */ + public final UnaryCallable + getMigrationWorkflowCallable() { + return stub.getMigrationWorkflowCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Lists previously created migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   for (MigrationWorkflow element :
+   *       migrationServiceClient.listMigrationWorkflows(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param parent Required. The project and location of the migration workflows to list. Example: + * `projects/123/locations/us` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListMigrationWorkflowsPagedResponse listMigrationWorkflows(LocationName parent) { + ListMigrationWorkflowsRequest request = + ListMigrationWorkflowsRequest.newBuilder() + .setParent(parent == null ? null : parent.toString()) + .build(); + return listMigrationWorkflows(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Lists previously created migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   for (MigrationWorkflow element :
+   *       migrationServiceClient.listMigrationWorkflows(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param parent Required. The project and location of the migration workflows to list. Example: + * `projects/123/locations/us` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListMigrationWorkflowsPagedResponse listMigrationWorkflows(String parent) { + ListMigrationWorkflowsRequest request = + ListMigrationWorkflowsRequest.newBuilder().setParent(parent).build(); + return listMigrationWorkflows(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Lists previously created migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   ListMigrationWorkflowsRequest request =
+   *       ListMigrationWorkflowsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   for (MigrationWorkflow element :
+   *       migrationServiceClient.listMigrationWorkflows(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListMigrationWorkflowsPagedResponse listMigrationWorkflows( + ListMigrationWorkflowsRequest request) { + return listMigrationWorkflowsPagedCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Lists previously created migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   ListMigrationWorkflowsRequest request =
+   *       ListMigrationWorkflowsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   ApiFuture future =
+   *       migrationServiceClient.listMigrationWorkflowsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (MigrationWorkflow element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ */ + public final UnaryCallable + listMigrationWorkflowsPagedCallable() { + return stub.listMigrationWorkflowsPagedCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Lists previously created migration workflow. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   ListMigrationWorkflowsRequest request =
+   *       ListMigrationWorkflowsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   while (true) {
+   *     ListMigrationWorkflowsResponse response =
+   *         migrationServiceClient.listMigrationWorkflowsCallable().call(request);
+   *     for (MigrationWorkflow element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
+ */ + public final UnaryCallable + listMigrationWorkflowsCallable() { + return stub.listMigrationWorkflowsCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes a migration workflow by name. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   MigrationWorkflowName name =
+   *       MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]");
+   *   migrationServiceClient.deleteMigrationWorkflow(name);
+   * }
+   * }
+ * + * @param name Required. The unique identifier for the migration workflow. Example: + * `projects/123/locations/us/workflows/1234` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final void deleteMigrationWorkflow(MigrationWorkflowName name) { + DeleteMigrationWorkflowRequest request = + DeleteMigrationWorkflowRequest.newBuilder() + .setName(name == null ? null : name.toString()) + .build(); + deleteMigrationWorkflow(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes a migration workflow by name. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   String name = MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString();
+   *   migrationServiceClient.deleteMigrationWorkflow(name);
+   * }
+   * }
+ * + * @param name Required. The unique identifier for the migration workflow. Example: + * `projects/123/locations/us/workflows/1234` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final void deleteMigrationWorkflow(String name) { + DeleteMigrationWorkflowRequest request = + DeleteMigrationWorkflowRequest.newBuilder().setName(name).build(); + deleteMigrationWorkflow(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes a migration workflow by name. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   DeleteMigrationWorkflowRequest request =
+   *       DeleteMigrationWorkflowRequest.newBuilder()
+   *           .setName(MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString())
+   *           .build();
+   *   migrationServiceClient.deleteMigrationWorkflow(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final void deleteMigrationWorkflow(DeleteMigrationWorkflowRequest request) { + deleteMigrationWorkflowCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes a migration workflow by name. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   DeleteMigrationWorkflowRequest request =
+   *       DeleteMigrationWorkflowRequest.newBuilder()
+   *           .setName(MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString())
+   *           .build();
+   *   ApiFuture future =
+   *       migrationServiceClient.deleteMigrationWorkflowCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
+ */ + public final UnaryCallable + deleteMigrationWorkflowCallable() { + return stub.deleteMigrationWorkflowCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Starts a previously created migration workflow. I.e., the state transitions from DRAFT to + * RUNNING. This is a no-op if the state is already RUNNING. An error will be signaled if the + * state is anything other than DRAFT or RUNNING. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   MigrationWorkflowName name =
+   *       MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]");
+   *   migrationServiceClient.startMigrationWorkflow(name);
+   * }
+   * }
+ * + * @param name Required. The unique identifier for the migration workflow. Example: + * `projects/123/locations/us/workflows/1234` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final void startMigrationWorkflow(MigrationWorkflowName name) { + StartMigrationWorkflowRequest request = + StartMigrationWorkflowRequest.newBuilder() + .setName(name == null ? null : name.toString()) + .build(); + startMigrationWorkflow(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Starts a previously created migration workflow. I.e., the state transitions from DRAFT to + * RUNNING. This is a no-op if the state is already RUNNING. An error will be signaled if the + * state is anything other than DRAFT or RUNNING. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   String name = MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString();
+   *   migrationServiceClient.startMigrationWorkflow(name);
+   * }
+   * }
+ * + * @param name Required. The unique identifier for the migration workflow. Example: + * `projects/123/locations/us/workflows/1234` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final void startMigrationWorkflow(String name) { + StartMigrationWorkflowRequest request = + StartMigrationWorkflowRequest.newBuilder().setName(name).build(); + startMigrationWorkflow(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Starts a previously created migration workflow. I.e., the state transitions from DRAFT to + * RUNNING. This is a no-op if the state is already RUNNING. An error will be signaled if the + * state is anything other than DRAFT or RUNNING. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   StartMigrationWorkflowRequest request =
+   *       StartMigrationWorkflowRequest.newBuilder()
+   *           .setName(MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString())
+   *           .build();
+   *   migrationServiceClient.startMigrationWorkflow(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final void startMigrationWorkflow(StartMigrationWorkflowRequest request) { + startMigrationWorkflowCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Starts a previously created migration workflow. I.e., the state transitions from DRAFT to + * RUNNING. This is a no-op if the state is already RUNNING. An error will be signaled if the + * state is anything other than DRAFT or RUNNING. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   StartMigrationWorkflowRequest request =
+   *       StartMigrationWorkflowRequest.newBuilder()
+   *           .setName(MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString())
+   *           .build();
+   *   ApiFuture future =
+   *       migrationServiceClient.startMigrationWorkflowCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
+ */ + public final UnaryCallable + startMigrationWorkflowCallable() { + return stub.startMigrationWorkflowCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets a previously created migration subtask. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   MigrationSubtaskName name =
+   *       MigrationSubtaskName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]", "[SUBTASK]");
+   *   MigrationSubtask response = migrationServiceClient.getMigrationSubtask(name);
+   * }
+   * }
+ * + * @param name Required. The unique identifier for the migration subtask. Example: + * `projects/123/locations/us/workflows/1234/subtasks/543` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final MigrationSubtask getMigrationSubtask(MigrationSubtaskName name) { + GetMigrationSubtaskRequest request = + GetMigrationSubtaskRequest.newBuilder() + .setName(name == null ? null : name.toString()) + .build(); + return getMigrationSubtask(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets a previously created migration subtask. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   String name =
+   *       MigrationSubtaskName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]", "[SUBTASK]").toString();
+   *   MigrationSubtask response = migrationServiceClient.getMigrationSubtask(name);
+   * }
+   * }
+ * + * @param name Required. The unique identifier for the migration subtask. Example: + * `projects/123/locations/us/workflows/1234/subtasks/543` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final MigrationSubtask getMigrationSubtask(String name) { + GetMigrationSubtaskRequest request = + GetMigrationSubtaskRequest.newBuilder().setName(name).build(); + return getMigrationSubtask(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets a previously created migration subtask. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   GetMigrationSubtaskRequest request =
+   *       GetMigrationSubtaskRequest.newBuilder()
+   *           .setName(
+   *               MigrationSubtaskName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]", "[SUBTASK]")
+   *                   .toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   MigrationSubtask response = migrationServiceClient.getMigrationSubtask(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final MigrationSubtask getMigrationSubtask(GetMigrationSubtaskRequest request) { + return getMigrationSubtaskCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets a previously created migration subtask. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   GetMigrationSubtaskRequest request =
+   *       GetMigrationSubtaskRequest.newBuilder()
+   *           .setName(
+   *               MigrationSubtaskName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]", "[SUBTASK]")
+   *                   .toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture future =
+   *       migrationServiceClient.getMigrationSubtaskCallable().futureCall(request);
+   *   // Do something.
+   *   MigrationSubtask response = future.get();
+   * }
+   * }
+ */ + public final UnaryCallable + getMigrationSubtaskCallable() { + return stub.getMigrationSubtaskCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Lists previously created migration subtasks. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   MigrationWorkflowName parent =
+   *       MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]");
+   *   for (MigrationSubtask element :
+   *       migrationServiceClient.listMigrationSubtasks(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param parent Required. The migration task of the subtasks to list. Example: + * `projects/123/locations/us/workflows/1234` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListMigrationSubtasksPagedResponse listMigrationSubtasks( + MigrationWorkflowName parent) { + ListMigrationSubtasksRequest request = + ListMigrationSubtasksRequest.newBuilder() + .setParent(parent == null ? null : parent.toString()) + .build(); + return listMigrationSubtasks(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Lists previously created migration subtasks. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   String parent = MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString();
+   *   for (MigrationSubtask element :
+   *       migrationServiceClient.listMigrationSubtasks(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param parent Required. The migration task of the subtasks to list. Example: + * `projects/123/locations/us/workflows/1234` + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListMigrationSubtasksPagedResponse listMigrationSubtasks(String parent) { + ListMigrationSubtasksRequest request = + ListMigrationSubtasksRequest.newBuilder().setParent(parent).build(); + return listMigrationSubtasks(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Lists previously created migration subtasks. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   ListMigrationSubtasksRequest request =
+   *       ListMigrationSubtasksRequest.newBuilder()
+   *           .setParent(
+   *               MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setFilter("filter-1274492040")
+   *           .build();
+   *   for (MigrationSubtask element :
+   *       migrationServiceClient.listMigrationSubtasks(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListMigrationSubtasksPagedResponse listMigrationSubtasks( + ListMigrationSubtasksRequest request) { + return listMigrationSubtasksPagedCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Lists previously created migration subtasks. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   ListMigrationSubtasksRequest request =
+   *       ListMigrationSubtasksRequest.newBuilder()
+   *           .setParent(
+   *               MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setFilter("filter-1274492040")
+   *           .build();
+   *   ApiFuture future =
+   *       migrationServiceClient.listMigrationSubtasksPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (MigrationSubtask element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ */ + public final UnaryCallable + listMigrationSubtasksPagedCallable() { + return stub.listMigrationSubtasksPagedCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Lists previously created migration subtasks. + * + *

Sample code: + * + *

{@code
+   * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+   *   ListMigrationSubtasksRequest request =
+   *       ListMigrationSubtasksRequest.newBuilder()
+   *           .setParent(
+   *               MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString())
+   *           .setReadMask(FieldMask.newBuilder().build())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .setFilter("filter-1274492040")
+   *           .build();
+   *   while (true) {
+   *     ListMigrationSubtasksResponse response =
+   *         migrationServiceClient.listMigrationSubtasksCallable().call(request);
+   *     for (MigrationSubtask element : response.getResponsesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
+ */ + public final UnaryCallable + listMigrationSubtasksCallable() { + return stub.listMigrationSubtasksCallable(); + } + + @Override + public final void close() { + stub.close(); + } + + @Override + public void shutdown() { + stub.shutdown(); + } + + @Override + public boolean isShutdown() { + return stub.isShutdown(); + } + + @Override + public boolean isTerminated() { + return stub.isTerminated(); + } + + @Override + public void shutdownNow() { + stub.shutdownNow(); + } + + @Override + public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { + return stub.awaitTermination(duration, unit); + } + + public static class ListMigrationWorkflowsPagedResponse + extends AbstractPagedListResponse< + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + MigrationWorkflow, + ListMigrationWorkflowsPage, + ListMigrationWorkflowsFixedSizeCollection> { + + public static ApiFuture createAsync( + PageContext< + ListMigrationWorkflowsRequest, ListMigrationWorkflowsResponse, MigrationWorkflow> + context, + ApiFuture futureResponse) { + ApiFuture futurePage = + ListMigrationWorkflowsPage.createEmptyPage().createPageAsync(context, futureResponse); + return ApiFutures.transform( + futurePage, + input -> new ListMigrationWorkflowsPagedResponse(input), + MoreExecutors.directExecutor()); + } + + private ListMigrationWorkflowsPagedResponse(ListMigrationWorkflowsPage page) { + super(page, ListMigrationWorkflowsFixedSizeCollection.createEmptyCollection()); + } + } + + public static class ListMigrationWorkflowsPage + extends AbstractPage< + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + MigrationWorkflow, + ListMigrationWorkflowsPage> { + + private ListMigrationWorkflowsPage( + PageContext< + ListMigrationWorkflowsRequest, ListMigrationWorkflowsResponse, MigrationWorkflow> + context, + ListMigrationWorkflowsResponse response) { + super(context, response); + } + + private static ListMigrationWorkflowsPage createEmptyPage() { 
+ return new ListMigrationWorkflowsPage(null, null); + } + + @Override + protected ListMigrationWorkflowsPage createPage( + PageContext< + ListMigrationWorkflowsRequest, ListMigrationWorkflowsResponse, MigrationWorkflow> + context, + ListMigrationWorkflowsResponse response) { + return new ListMigrationWorkflowsPage(context, response); + } + + @Override + public ApiFuture createPageAsync( + PageContext< + ListMigrationWorkflowsRequest, ListMigrationWorkflowsResponse, MigrationWorkflow> + context, + ApiFuture futureResponse) { + return super.createPageAsync(context, futureResponse); + } + } + + public static class ListMigrationWorkflowsFixedSizeCollection + extends AbstractFixedSizeCollection< + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + MigrationWorkflow, + ListMigrationWorkflowsPage, + ListMigrationWorkflowsFixedSizeCollection> { + + private ListMigrationWorkflowsFixedSizeCollection( + List pages, int collectionSize) { + super(pages, collectionSize); + } + + private static ListMigrationWorkflowsFixedSizeCollection createEmptyCollection() { + return new ListMigrationWorkflowsFixedSizeCollection(null, 0); + } + + @Override + protected ListMigrationWorkflowsFixedSizeCollection createCollection( + List pages, int collectionSize) { + return new ListMigrationWorkflowsFixedSizeCollection(pages, collectionSize); + } + } + + public static class ListMigrationSubtasksPagedResponse + extends AbstractPagedListResponse< + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + MigrationSubtask, + ListMigrationSubtasksPage, + ListMigrationSubtasksFixedSizeCollection> { + + public static ApiFuture createAsync( + PageContext + context, + ApiFuture futureResponse) { + ApiFuture futurePage = + ListMigrationSubtasksPage.createEmptyPage().createPageAsync(context, futureResponse); + return ApiFutures.transform( + futurePage, + input -> new ListMigrationSubtasksPagedResponse(input), + MoreExecutors.directExecutor()); + } + + private 
ListMigrationSubtasksPagedResponse(ListMigrationSubtasksPage page) { + super(page, ListMigrationSubtasksFixedSizeCollection.createEmptyCollection()); + } + } + + public static class ListMigrationSubtasksPage + extends AbstractPage< + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + MigrationSubtask, + ListMigrationSubtasksPage> { + + private ListMigrationSubtasksPage( + PageContext + context, + ListMigrationSubtasksResponse response) { + super(context, response); + } + + private static ListMigrationSubtasksPage createEmptyPage() { + return new ListMigrationSubtasksPage(null, null); + } + + @Override + protected ListMigrationSubtasksPage createPage( + PageContext + context, + ListMigrationSubtasksResponse response) { + return new ListMigrationSubtasksPage(context, response); + } + + @Override + public ApiFuture createPageAsync( + PageContext + context, + ApiFuture futureResponse) { + return super.createPageAsync(context, futureResponse); + } + } + + public static class ListMigrationSubtasksFixedSizeCollection + extends AbstractFixedSizeCollection< + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + MigrationSubtask, + ListMigrationSubtasksPage, + ListMigrationSubtasksFixedSizeCollection> { + + private ListMigrationSubtasksFixedSizeCollection( + List pages, int collectionSize) { + super(pages, collectionSize); + } + + private static ListMigrationSubtasksFixedSizeCollection createEmptyCollection() { + return new ListMigrationSubtasksFixedSizeCollection(null, 0); + } + + @Override + protected ListMigrationSubtasksFixedSizeCollection createCollection( + List pages, int collectionSize) { + return new ListMigrationSubtasksFixedSizeCollection(pages, collectionSize); + } + } +} diff --git a/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationServiceSettings.java b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationServiceSettings.java new file mode 
100644 index 0000000..9b96f7c --- /dev/null +++ b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationServiceSettings.java @@ -0,0 +1,273 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.migration.v2alpha; + +import static com.google.cloud.bigquery.migration.v2alpha.MigrationServiceClient.ListMigrationSubtasksPagedResponse; +import static com.google.cloud.bigquery.migration.v2alpha.MigrationServiceClient.ListMigrationWorkflowsPagedResponse; + +import com.google.api.core.ApiFunction; +import com.google.api.core.BetaApi; +import com.google.api.gax.core.GoogleCredentialsProvider; +import com.google.api.gax.core.InstantiatingExecutorProvider; +import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; +import com.google.api.gax.rpc.ApiClientHeaderProvider; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.ClientSettings; +import com.google.api.gax.rpc.PagedCallSettings; +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.api.gax.rpc.UnaryCallSettings; +import com.google.cloud.bigquery.migration.v2alpha.stub.MigrationServiceStubSettings; +import com.google.protobuf.Empty; +import java.io.IOException; +import java.util.List; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. 
+/** + * Settings class to configure an instance of {@link MigrationServiceClient}. + * + *

The default instance has everything set to sensible defaults: + * + *

    + *
  • The default service address (bigquerymigration.googleapis.com) and default port (443) are + * used. + *
  • Credentials are acquired automatically through Application Default Credentials. + *
  • Retries are configured for idempotent methods but not for non-idempotent methods. + *
+ * + *

The builder of this class is recursive, so contained classes are themselves builders. When + * build() is called, the tree of builders is called to create the complete settings object. + * + *

For example, to set the total timeout of createMigrationWorkflow to 30 seconds: + * + *

{@code
+ * MigrationServiceSettings.Builder migrationServiceSettingsBuilder =
+ *     MigrationServiceSettings.newBuilder();
+ * migrationServiceSettingsBuilder
+ *     .createMigrationWorkflowSettings()
+ *     .setRetrySettings(
+ *         migrationServiceSettingsBuilder
+ *             .createMigrationWorkflowSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
+ *             .setTotalTimeout(Duration.ofSeconds(30))
+ *             .build());
+ * MigrationServiceSettings migrationServiceSettings = migrationServiceSettingsBuilder.build();
+ * }
+ */ +@BetaApi +@Generated("by gapic-generator-java") +public class MigrationServiceSettings extends ClientSettings { + + /** Returns the object with the settings used for calls to createMigrationWorkflow. */ + public UnaryCallSettings + createMigrationWorkflowSettings() { + return ((MigrationServiceStubSettings) getStubSettings()).createMigrationWorkflowSettings(); + } + + /** Returns the object with the settings used for calls to getMigrationWorkflow. */ + public UnaryCallSettings + getMigrationWorkflowSettings() { + return ((MigrationServiceStubSettings) getStubSettings()).getMigrationWorkflowSettings(); + } + + /** Returns the object with the settings used for calls to listMigrationWorkflows. */ + public PagedCallSettings< + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + ListMigrationWorkflowsPagedResponse> + listMigrationWorkflowsSettings() { + return ((MigrationServiceStubSettings) getStubSettings()).listMigrationWorkflowsSettings(); + } + + /** Returns the object with the settings used for calls to deleteMigrationWorkflow. */ + public UnaryCallSettings + deleteMigrationWorkflowSettings() { + return ((MigrationServiceStubSettings) getStubSettings()).deleteMigrationWorkflowSettings(); + } + + /** Returns the object with the settings used for calls to startMigrationWorkflow. */ + public UnaryCallSettings startMigrationWorkflowSettings() { + return ((MigrationServiceStubSettings) getStubSettings()).startMigrationWorkflowSettings(); + } + + /** Returns the object with the settings used for calls to getMigrationSubtask. */ + public UnaryCallSettings + getMigrationSubtaskSettings() { + return ((MigrationServiceStubSettings) getStubSettings()).getMigrationSubtaskSettings(); + } + + /** Returns the object with the settings used for calls to listMigrationSubtasks. 
*/ + public PagedCallSettings< + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationSubtasksPagedResponse> + listMigrationSubtasksSettings() { + return ((MigrationServiceStubSettings) getStubSettings()).listMigrationSubtasksSettings(); + } + + public static final MigrationServiceSettings create(MigrationServiceStubSettings stub) + throws IOException { + return new MigrationServiceSettings.Builder(stub.toBuilder()).build(); + } + + /** Returns a builder for the default ExecutorProvider for this service. */ + public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { + return MigrationServiceStubSettings.defaultExecutorProviderBuilder(); + } + + /** Returns the default service endpoint. */ + public static String getDefaultEndpoint() { + return MigrationServiceStubSettings.getDefaultEndpoint(); + } + + /** Returns the default service scopes. */ + public static List getDefaultServiceScopes() { + return MigrationServiceStubSettings.getDefaultServiceScopes(); + } + + /** Returns a builder for the default credentials for this service. */ + public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { + return MigrationServiceStubSettings.defaultCredentialsProviderBuilder(); + } + + /** Returns a builder for the default ChannelProvider for this service. 
*/ + public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { + return MigrationServiceStubSettings.defaultGrpcTransportProviderBuilder(); + } + + public static TransportChannelProvider defaultTransportChannelProvider() { + return MigrationServiceStubSettings.defaultTransportChannelProvider(); + } + + @BetaApi("The surface for customizing headers is not stable yet and may change in the future.") + public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { + return MigrationServiceStubSettings.defaultApiClientHeaderProviderBuilder(); + } + + /** Returns a new builder for this class. */ + public static Builder newBuilder() { + return Builder.createDefault(); + } + + /** Returns a new builder for this class. */ + public static Builder newBuilder(ClientContext clientContext) { + return new Builder(clientContext); + } + + /** Returns a builder containing all the values of this settings class. */ + public Builder toBuilder() { + return new Builder(this); + } + + protected MigrationServiceSettings(Builder settingsBuilder) throws IOException { + super(settingsBuilder); + } + + /** Builder for MigrationServiceSettings. 
*/ + public static class Builder extends ClientSettings.Builder { + + protected Builder() throws IOException { + this(((ClientContext) null)); + } + + protected Builder(ClientContext clientContext) { + super(MigrationServiceStubSettings.newBuilder(clientContext)); + } + + protected Builder(MigrationServiceSettings settings) { + super(settings.getStubSettings().toBuilder()); + } + + protected Builder(MigrationServiceStubSettings.Builder stubSettings) { + super(stubSettings); + } + + private static Builder createDefault() { + return new Builder(MigrationServiceStubSettings.newBuilder()); + } + + public MigrationServiceStubSettings.Builder getStubSettingsBuilder() { + return ((MigrationServiceStubSettings.Builder) getStubSettings()); + } + + /** + * Applies the given settings updater function to all of the unary API methods in this service. + * + *

Note: This method does not support applying settings to streaming methods. + */ + public Builder applyToAllUnaryMethods( + ApiFunction, Void> settingsUpdater) { + super.applyToAllUnaryMethods( + getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater); + return this; + } + + /** Returns the builder for the settings used for calls to createMigrationWorkflow. */ + public UnaryCallSettings.Builder + createMigrationWorkflowSettings() { + return getStubSettingsBuilder().createMigrationWorkflowSettings(); + } + + /** Returns the builder for the settings used for calls to getMigrationWorkflow. */ + public UnaryCallSettings.Builder + getMigrationWorkflowSettings() { + return getStubSettingsBuilder().getMigrationWorkflowSettings(); + } + + /** Returns the builder for the settings used for calls to listMigrationWorkflows. */ + public PagedCallSettings.Builder< + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + ListMigrationWorkflowsPagedResponse> + listMigrationWorkflowsSettings() { + return getStubSettingsBuilder().listMigrationWorkflowsSettings(); + } + + /** Returns the builder for the settings used for calls to deleteMigrationWorkflow. */ + public UnaryCallSettings.Builder + deleteMigrationWorkflowSettings() { + return getStubSettingsBuilder().deleteMigrationWorkflowSettings(); + } + + /** Returns the builder for the settings used for calls to startMigrationWorkflow. */ + public UnaryCallSettings.Builder + startMigrationWorkflowSettings() { + return getStubSettingsBuilder().startMigrationWorkflowSettings(); + } + + /** Returns the builder for the settings used for calls to getMigrationSubtask. */ + public UnaryCallSettings.Builder + getMigrationSubtaskSettings() { + return getStubSettingsBuilder().getMigrationSubtaskSettings(); + } + + /** Returns the builder for the settings used for calls to listMigrationSubtasks. 
*/ + public PagedCallSettings.Builder< + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationSubtasksPagedResponse> + listMigrationSubtasksSettings() { + return getStubSettingsBuilder().listMigrationSubtasksSettings(); + } + + @Override + public MigrationServiceSettings build() throws IOException { + return new MigrationServiceSettings(this); + } + } +} diff --git a/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/gapic_metadata.json b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/gapic_metadata.json new file mode 100644 index 0000000..fb5ebce --- /dev/null +++ b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/gapic_metadata.json @@ -0,0 +1,39 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "java", + "protoPackage": "google.cloud.bigquery.migration.v2alpha", + "libraryPackage": "com.google.cloud.bigquery.migration.v2alpha", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": ["createMigrationWorkflow", "createMigrationWorkflow", "createMigrationWorkflow", "createMigrationWorkflowCallable"] + }, + "DeleteMigrationWorkflow": { + "methods": ["deleteMigrationWorkflow", "deleteMigrationWorkflow", "deleteMigrationWorkflow", "deleteMigrationWorkflowCallable"] + }, + "GetMigrationSubtask": { + "methods": ["getMigrationSubtask", "getMigrationSubtask", "getMigrationSubtask", "getMigrationSubtaskCallable"] + }, + "GetMigrationWorkflow": { + "methods": ["getMigrationWorkflow", "getMigrationWorkflow", "getMigrationWorkflow", "getMigrationWorkflowCallable"] + }, + "ListMigrationSubtasks": { + "methods": ["listMigrationSubtasks", "listMigrationSubtasks", "listMigrationSubtasks", "listMigrationSubtasksPagedCallable", 
"listMigrationSubtasksCallable"] + }, + "ListMigrationWorkflows": { + "methods": ["listMigrationWorkflows", "listMigrationWorkflows", "listMigrationWorkflows", "listMigrationWorkflowsPagedCallable", "listMigrationWorkflowsCallable"] + }, + "StartMigrationWorkflow": { + "methods": ["startMigrationWorkflow", "startMigrationWorkflow", "startMigrationWorkflow", "startMigrationWorkflowCallable"] + } + } + } + } + } + } +} \ No newline at end of file diff --git a/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/package-info.java b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/package-info.java new file mode 100644 index 0000000..f03b79a --- /dev/null +++ b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/package-info.java @@ -0,0 +1,38 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * The interfaces provided are listed below, along with usage samples. + * + *

======================= MigrationServiceClient ======================= + * + *

Service Description: Service to handle EDW migrations. + * + *

Sample for MigrationServiceClient: + * + *

{@code
+ * try (MigrationServiceClient migrationServiceClient = MigrationServiceClient.create()) {
+ *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+ *   MigrationWorkflow migrationWorkflow = MigrationWorkflow.newBuilder().build();
+ *   MigrationWorkflow response =
+ *       migrationServiceClient.createMigrationWorkflow(parent, migrationWorkflow);
+ * }
+ * }
+ */ +@Generated("by gapic-generator-java") +package com.google.cloud.bigquery.migration.v2alpha; + +import javax.annotation.Generated; diff --git a/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/stub/GrpcMigrationServiceCallableFactory.java b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/stub/GrpcMigrationServiceCallableFactory.java new file mode 100644 index 0000000..170e016 --- /dev/null +++ b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/stub/GrpcMigrationServiceCallableFactory.java @@ -0,0 +1,115 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.migration.v2alpha.stub; + +import com.google.api.core.BetaApi; +import com.google.api.gax.grpc.GrpcCallSettings; +import com.google.api.gax.grpc.GrpcCallableFactory; +import com.google.api.gax.grpc.GrpcStubCallableFactory; +import com.google.api.gax.rpc.BatchingCallSettings; +import com.google.api.gax.rpc.BidiStreamingCallable; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.ClientStreamingCallable; +import com.google.api.gax.rpc.OperationCallSettings; +import com.google.api.gax.rpc.OperationCallable; +import com.google.api.gax.rpc.PagedCallSettings; +import com.google.api.gax.rpc.ServerStreamingCallSettings; +import com.google.api.gax.rpc.ServerStreamingCallable; +import com.google.api.gax.rpc.StreamingCallSettings; +import com.google.api.gax.rpc.UnaryCallSettings; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; +import com.google.longrunning.stub.OperationsStub; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * gRPC callable factory implementation for the MigrationService service API. + * + *

This class is for advanced usage. + */ +@BetaApi +@Generated("by gapic-generator-java") +public class GrpcMigrationServiceCallableFactory implements GrpcStubCallableFactory { + + @Override + public UnaryCallable createUnaryCallable( + GrpcCallSettings grpcCallSettings, + UnaryCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createUnaryCallable(grpcCallSettings, callSettings, clientContext); + } + + @Override + public + UnaryCallable createPagedCallable( + GrpcCallSettings grpcCallSettings, + PagedCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); + } + + @Override + public UnaryCallable createBatchingCallable( + GrpcCallSettings grpcCallSettings, + BatchingCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createBatchingCallable( + grpcCallSettings, callSettings, clientContext); + } + + @Override + public + OperationCallable createOperationCallable( + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, + ClientContext clientContext, + OperationsStub operationsStub) { + return GrpcCallableFactory.createOperationCallable( + grpcCallSettings, callSettings, clientContext, operationsStub); + } + + @Override + public + BidiStreamingCallable createBidiStreamingCallable( + GrpcCallSettings grpcCallSettings, + StreamingCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createBidiStreamingCallable( + grpcCallSettings, callSettings, clientContext); + } + + @Override + public + ServerStreamingCallable createServerStreamingCallable( + GrpcCallSettings grpcCallSettings, + ServerStreamingCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createServerStreamingCallable( + grpcCallSettings, callSettings, clientContext); + } + + @Override + public + ClientStreamingCallable createClientStreamingCallable( + 
GrpcCallSettings grpcCallSettings, + StreamingCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createClientStreamingCallable( + grpcCallSettings, callSettings, clientContext); + } +} diff --git a/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/stub/GrpcMigrationServiceStub.java b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/stub/GrpcMigrationServiceStub.java new file mode 100644 index 0000000..e10ad7f --- /dev/null +++ b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/stub/GrpcMigrationServiceStub.java @@ -0,0 +1,420 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.migration.v2alpha.stub; + +import static com.google.cloud.bigquery.migration.v2alpha.MigrationServiceClient.ListMigrationSubtasksPagedResponse; +import static com.google.cloud.bigquery.migration.v2alpha.MigrationServiceClient.ListMigrationWorkflowsPagedResponse; + +import com.google.api.core.BetaApi; +import com.google.api.gax.core.BackgroundResource; +import com.google.api.gax.core.BackgroundResourceAggregation; +import com.google.api.gax.grpc.GrpcCallSettings; +import com.google.api.gax.grpc.GrpcStubCallableFactory; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest; +import com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest; +import com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest; +import com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest; +import com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest; +import com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse; +import com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest; +import com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse; +import com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask; +import com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow; +import com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest; +import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; +import com.google.protobuf.Empty; +import io.grpc.MethodDescriptor; +import io.grpc.protobuf.ProtoUtils; +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * gRPC stub implementation for the MigrationService service API. + * + *

This class is for advanced usage and reflects the underlying API directly. + */ +@BetaApi +@Generated("by gapic-generator-java") +public class GrpcMigrationServiceStub extends MigrationServiceStub { + private static final MethodDescriptor + createMigrationWorkflowMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.migration.v2alpha.MigrationService/CreateMigrationWorkflow") + .setRequestMarshaller( + ProtoUtils.marshaller(CreateMigrationWorkflowRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(MigrationWorkflow.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + getMigrationWorkflowMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationWorkflow") + .setRequestMarshaller( + ProtoUtils.marshaller(GetMigrationWorkflowRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(MigrationWorkflow.getDefaultInstance())) + .build(); + + private static final MethodDescriptor< + ListMigrationWorkflowsRequest, ListMigrationWorkflowsResponse> + listMigrationWorkflowsMethodDescriptor = + MethodDescriptor + .newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationWorkflows") + .setRequestMarshaller( + ProtoUtils.marshaller(ListMigrationWorkflowsRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(ListMigrationWorkflowsResponse.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + deleteMigrationWorkflowMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.migration.v2alpha.MigrationService/DeleteMigrationWorkflow") + .setRequestMarshaller( + 
ProtoUtils.marshaller(DeleteMigrationWorkflowRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + startMigrationWorkflowMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.migration.v2alpha.MigrationService/StartMigrationWorkflow") + .setRequestMarshaller( + ProtoUtils.marshaller(StartMigrationWorkflowRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + getMigrationSubtaskMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationSubtask") + .setRequestMarshaller( + ProtoUtils.marshaller(GetMigrationSubtaskRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(MigrationSubtask.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + listMigrationSubtasksMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationSubtasks") + .setRequestMarshaller( + ProtoUtils.marshaller(ListMigrationSubtasksRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(ListMigrationSubtasksResponse.getDefaultInstance())) + .build(); + + private final UnaryCallable + createMigrationWorkflowCallable; + private final UnaryCallable + getMigrationWorkflowCallable; + private final UnaryCallable + listMigrationWorkflowsCallable; + private final UnaryCallable + listMigrationWorkflowsPagedCallable; + private final UnaryCallable + deleteMigrationWorkflowCallable; + private final UnaryCallable startMigrationWorkflowCallable; + private final UnaryCallable + 
getMigrationSubtaskCallable; + private final UnaryCallable + listMigrationSubtasksCallable; + private final UnaryCallable + listMigrationSubtasksPagedCallable; + + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; + private final GrpcStubCallableFactory callableFactory; + + public static final GrpcMigrationServiceStub create(MigrationServiceStubSettings settings) + throws IOException { + return new GrpcMigrationServiceStub(settings, ClientContext.create(settings)); + } + + public static final GrpcMigrationServiceStub create(ClientContext clientContext) + throws IOException { + return new GrpcMigrationServiceStub( + MigrationServiceStubSettings.newBuilder().build(), clientContext); + } + + public static final GrpcMigrationServiceStub create( + ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { + return new GrpcMigrationServiceStub( + MigrationServiceStubSettings.newBuilder().build(), clientContext, callableFactory); + } + + /** + * Constructs an instance of GrpcMigrationServiceStub, using the given settings. This is protected + * so that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. + */ + protected GrpcMigrationServiceStub( + MigrationServiceStubSettings settings, ClientContext clientContext) throws IOException { + this(settings, clientContext, new GrpcMigrationServiceCallableFactory()); + } + + /** + * Constructs an instance of GrpcMigrationServiceStub, using the given settings. This is protected + * so that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. 
+ */ + protected GrpcMigrationServiceStub( + MigrationServiceStubSettings settings, + ClientContext clientContext, + GrpcStubCallableFactory callableFactory) + throws IOException { + this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); + + GrpcCallSettings + createMigrationWorkflowTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createMigrationWorkflowMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + }) + .build(); + GrpcCallSettings + getMigrationWorkflowTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(getMigrationWorkflowMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + }) + .build(); + GrpcCallSettings + listMigrationWorkflowsTransportSettings = + GrpcCallSettings + .newBuilder() + .setMethodDescriptor(listMigrationWorkflowsMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + }) + .build(); + GrpcCallSettings + deleteMigrationWorkflowTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(deleteMigrationWorkflowMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + }) + .build(); + GrpcCallSettings startMigrationWorkflowTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(startMigrationWorkflowMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", 
String.valueOf(request.getName())); + return params.build(); + }) + .build(); + GrpcCallSettings + getMigrationSubtaskTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(getMigrationSubtaskMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + }) + .build(); + GrpcCallSettings + listMigrationSubtasksTransportSettings = + GrpcCallSettings + .newBuilder() + .setMethodDescriptor(listMigrationSubtasksMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + }) + .build(); + + this.createMigrationWorkflowCallable = + callableFactory.createUnaryCallable( + createMigrationWorkflowTransportSettings, + settings.createMigrationWorkflowSettings(), + clientContext); + this.getMigrationWorkflowCallable = + callableFactory.createUnaryCallable( + getMigrationWorkflowTransportSettings, + settings.getMigrationWorkflowSettings(), + clientContext); + this.listMigrationWorkflowsCallable = + callableFactory.createUnaryCallable( + listMigrationWorkflowsTransportSettings, + settings.listMigrationWorkflowsSettings(), + clientContext); + this.listMigrationWorkflowsPagedCallable = + callableFactory.createPagedCallable( + listMigrationWorkflowsTransportSettings, + settings.listMigrationWorkflowsSettings(), + clientContext); + this.deleteMigrationWorkflowCallable = + callableFactory.createUnaryCallable( + deleteMigrationWorkflowTransportSettings, + settings.deleteMigrationWorkflowSettings(), + clientContext); + this.startMigrationWorkflowCallable = + callableFactory.createUnaryCallable( + startMigrationWorkflowTransportSettings, + settings.startMigrationWorkflowSettings(), + clientContext); + this.getMigrationSubtaskCallable = + callableFactory.createUnaryCallable( + 
getMigrationSubtaskTransportSettings, + settings.getMigrationSubtaskSettings(), + clientContext); + this.listMigrationSubtasksCallable = + callableFactory.createUnaryCallable( + listMigrationSubtasksTransportSettings, + settings.listMigrationSubtasksSettings(), + clientContext); + this.listMigrationSubtasksPagedCallable = + callableFactory.createPagedCallable( + listMigrationSubtasksTransportSettings, + settings.listMigrationSubtasksSettings(), + clientContext); + + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; + } + + @Override + public UnaryCallable + createMigrationWorkflowCallable() { + return createMigrationWorkflowCallable; + } + + @Override + public UnaryCallable + getMigrationWorkflowCallable() { + return getMigrationWorkflowCallable; + } + + @Override + public UnaryCallable + listMigrationWorkflowsCallable() { + return listMigrationWorkflowsCallable; + } + + @Override + public UnaryCallable + listMigrationWorkflowsPagedCallable() { + return listMigrationWorkflowsPagedCallable; + } + + @Override + public UnaryCallable deleteMigrationWorkflowCallable() { + return deleteMigrationWorkflowCallable; + } + + @Override + public UnaryCallable startMigrationWorkflowCallable() { + return startMigrationWorkflowCallable; + } + + @Override + public UnaryCallable getMigrationSubtaskCallable() { + return getMigrationSubtaskCallable; + } + + @Override + public UnaryCallable + listMigrationSubtasksCallable() { + return listMigrationSubtasksCallable; + } + + @Override + public UnaryCallable + listMigrationSubtasksPagedCallable() { + return listMigrationSubtasksPagedCallable; + } + + @Override + public final void close() { + try { + backgroundResources.close(); + } catch (RuntimeException e) { + throw e; + } catch (Exception e) { + throw new IllegalStateException("Failed to close resource", e); + } + } + + @Override + public void 
shutdown() { + backgroundResources.shutdown(); + } + + @Override + public boolean isShutdown() { + return backgroundResources.isShutdown(); + } + + @Override + public boolean isTerminated() { + return backgroundResources.isTerminated(); + } + + @Override + public void shutdownNow() { + backgroundResources.shutdownNow(); + } + + @Override + public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { + return backgroundResources.awaitTermination(duration, unit); + } +} diff --git a/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/stub/MigrationServiceStub.java b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/stub/MigrationServiceStub.java new file mode 100644 index 0000000..5c5846b --- /dev/null +++ b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/stub/MigrationServiceStub.java @@ -0,0 +1,95 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.migration.v2alpha.stub; + +import static com.google.cloud.bigquery.migration.v2alpha.MigrationServiceClient.ListMigrationSubtasksPagedResponse; +import static com.google.cloud.bigquery.migration.v2alpha.MigrationServiceClient.ListMigrationWorkflowsPagedResponse; + +import com.google.api.core.BetaApi; +import com.google.api.gax.core.BackgroundResource; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest; +import com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest; +import com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest; +import com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest; +import com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest; +import com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse; +import com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest; +import com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse; +import com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask; +import com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow; +import com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest; +import com.google.protobuf.Empty; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * Base stub class for the MigrationService service API. + * + *

This class is for advanced usage and reflects the underlying API directly. + */ +@BetaApi +@Generated("by gapic-generator-java") +public abstract class MigrationServiceStub implements BackgroundResource { + + public UnaryCallable + createMigrationWorkflowCallable() { + throw new UnsupportedOperationException("Not implemented: createMigrationWorkflowCallable()"); + } + + public UnaryCallable + getMigrationWorkflowCallable() { + throw new UnsupportedOperationException("Not implemented: getMigrationWorkflowCallable()"); + } + + public UnaryCallable + listMigrationWorkflowsPagedCallable() { + throw new UnsupportedOperationException( + "Not implemented: listMigrationWorkflowsPagedCallable()"); + } + + public UnaryCallable + listMigrationWorkflowsCallable() { + throw new UnsupportedOperationException("Not implemented: listMigrationWorkflowsCallable()"); + } + + public UnaryCallable deleteMigrationWorkflowCallable() { + throw new UnsupportedOperationException("Not implemented: deleteMigrationWorkflowCallable()"); + } + + public UnaryCallable startMigrationWorkflowCallable() { + throw new UnsupportedOperationException("Not implemented: startMigrationWorkflowCallable()"); + } + + public UnaryCallable getMigrationSubtaskCallable() { + throw new UnsupportedOperationException("Not implemented: getMigrationSubtaskCallable()"); + } + + public UnaryCallable + listMigrationSubtasksPagedCallable() { + throw new UnsupportedOperationException( + "Not implemented: listMigrationSubtasksPagedCallable()"); + } + + public UnaryCallable + listMigrationSubtasksCallable() { + throw new UnsupportedOperationException("Not implemented: listMigrationSubtasksCallable()"); + } + + @Override + public abstract void close(); +} diff --git a/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/stub/MigrationServiceStubSettings.java b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/stub/MigrationServiceStubSettings.java new 
file mode 100644 index 0000000..6c269ad --- /dev/null +++ b/google-cloud-bigquerymigration/src/main/java/com/google/cloud/bigquery/migration/v2alpha/stub/MigrationServiceStubSettings.java @@ -0,0 +1,628 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.migration.v2alpha.stub; + +import static com.google.cloud.bigquery.migration.v2alpha.MigrationServiceClient.ListMigrationSubtasksPagedResponse; +import static com.google.cloud.bigquery.migration.v2alpha.MigrationServiceClient.ListMigrationWorkflowsPagedResponse; + +import com.google.api.core.ApiFunction; +import com.google.api.core.ApiFuture; +import com.google.api.core.BetaApi; +import com.google.api.gax.core.GaxProperties; +import com.google.api.gax.core.GoogleCredentialsProvider; +import com.google.api.gax.core.InstantiatingExecutorProvider; +import com.google.api.gax.grpc.GaxGrpcProperties; +import com.google.api.gax.grpc.GrpcTransportChannel; +import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; +import com.google.api.gax.retrying.RetrySettings; +import com.google.api.gax.rpc.ApiCallContext; +import com.google.api.gax.rpc.ApiClientHeaderProvider; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.PageContext; +import com.google.api.gax.rpc.PagedCallSettings; +import com.google.api.gax.rpc.PagedListDescriptor; +import com.google.api.gax.rpc.PagedListResponseFactory; +import 
com.google.api.gax.rpc.StatusCode; +import com.google.api.gax.rpc.StubSettings; +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.api.gax.rpc.UnaryCallSettings; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest; +import com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest; +import com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest; +import com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest; +import com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest; +import com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse; +import com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest; +import com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse; +import com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask; +import com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow; +import com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.protobuf.Empty; +import java.io.IOException; +import java.util.List; +import javax.annotation.Generated; +import org.threeten.bp.Duration; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * Settings class to configure an instance of {@link MigrationServiceStub}. + * + *

The default instance has everything set to sensible defaults: + * + *

    + *
  • The default service address (bigquerymigration.googleapis.com) and default port (443) are + * used. + *
  • Credentials are acquired automatically through Application Default Credentials. + *
  • Retries are configured for idempotent methods but not for non-idempotent methods. + *
+ * + *

The builder of this class is recursive, so contained classes are themselves builders. When + * build() is called, the tree of builders is called to create the complete settings object. + * + *

For example, to set the total timeout of createMigrationWorkflow to 30 seconds: + * + *

{@code
+ * MigrationServiceStubSettings.Builder migrationServiceSettingsBuilder =
+ *     MigrationServiceStubSettings.newBuilder();
+ * migrationServiceSettingsBuilder
+ *     .createMigrationWorkflowSettings()
+ *     .setRetrySettings(
+ *         migrationServiceSettingsBuilder
+ *             .createMigrationWorkflowSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
+ *             .setTotalTimeout(Duration.ofSeconds(30))
+ *             .build());
+ * MigrationServiceStubSettings migrationServiceSettings = migrationServiceSettingsBuilder.build();
+ * }
+ */ +@BetaApi +@Generated("by gapic-generator-java") +public class MigrationServiceStubSettings extends StubSettings { + /** The default scopes of the service. */ + private static final ImmutableList DEFAULT_SERVICE_SCOPES = + ImmutableList.builder().add("https://www.googleapis.com/auth/cloud-platform").build(); + + private final UnaryCallSettings + createMigrationWorkflowSettings; + private final UnaryCallSettings + getMigrationWorkflowSettings; + private final PagedCallSettings< + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + ListMigrationWorkflowsPagedResponse> + listMigrationWorkflowsSettings; + private final UnaryCallSettings + deleteMigrationWorkflowSettings; + private final UnaryCallSettings + startMigrationWorkflowSettings; + private final UnaryCallSettings + getMigrationSubtaskSettings; + private final PagedCallSettings< + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationSubtasksPagedResponse> + listMigrationSubtasksSettings; + + private static final PagedListDescriptor< + ListMigrationWorkflowsRequest, ListMigrationWorkflowsResponse, MigrationWorkflow> + LIST_MIGRATION_WORKFLOWS_PAGE_STR_DESC = + new PagedListDescriptor< + ListMigrationWorkflowsRequest, ListMigrationWorkflowsResponse, MigrationWorkflow>() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListMigrationWorkflowsRequest injectToken( + ListMigrationWorkflowsRequest payload, String token) { + return ListMigrationWorkflowsRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListMigrationWorkflowsRequest injectPageSize( + ListMigrationWorkflowsRequest payload, int pageSize) { + return ListMigrationWorkflowsRequest.newBuilder(payload) + .setPageSize(pageSize) + .build(); + } + + @Override + public Integer extractPageSize(ListMigrationWorkflowsRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListMigrationWorkflowsResponse payload) 
{ + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources( + ListMigrationWorkflowsResponse payload) { + return payload.getMigrationWorkflowsList() == null + ? ImmutableList.of() + : payload.getMigrationWorkflowsList(); + } + }; + + private static final PagedListDescriptor< + ListMigrationSubtasksRequest, ListMigrationSubtasksResponse, MigrationSubtask> + LIST_MIGRATION_SUBTASKS_PAGE_STR_DESC = + new PagedListDescriptor< + ListMigrationSubtasksRequest, ListMigrationSubtasksResponse, MigrationSubtask>() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListMigrationSubtasksRequest injectToken( + ListMigrationSubtasksRequest payload, String token) { + return ListMigrationSubtasksRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListMigrationSubtasksRequest injectPageSize( + ListMigrationSubtasksRequest payload, int pageSize) { + return ListMigrationSubtasksRequest.newBuilder(payload).setPageSize(pageSize).build(); + } + + @Override + public Integer extractPageSize(ListMigrationSubtasksRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListMigrationSubtasksResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources( + ListMigrationSubtasksResponse payload) { + return payload.getMigrationSubtasksList() == null + ? 
ImmutableList.of() + : payload.getMigrationSubtasksList(); + } + }; + + private static final PagedListResponseFactory< + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + ListMigrationWorkflowsPagedResponse> + LIST_MIGRATION_WORKFLOWS_PAGE_STR_FACT = + new PagedListResponseFactory< + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + ListMigrationWorkflowsPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable + callable, + ListMigrationWorkflowsRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext< + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + MigrationWorkflow> + pageContext = + PageContext.create( + callable, LIST_MIGRATION_WORKFLOWS_PAGE_STR_DESC, request, context); + return ListMigrationWorkflowsPagedResponse.createAsync(pageContext, futureResponse); + } + }; + + private static final PagedListResponseFactory< + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationSubtasksPagedResponse> + LIST_MIGRATION_SUBTASKS_PAGE_STR_FACT = + new PagedListResponseFactory< + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationSubtasksPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable callable, + ListMigrationSubtasksRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext< + ListMigrationSubtasksRequest, ListMigrationSubtasksResponse, MigrationSubtask> + pageContext = + PageContext.create( + callable, LIST_MIGRATION_SUBTASKS_PAGE_STR_DESC, request, context); + return ListMigrationSubtasksPagedResponse.createAsync(pageContext, futureResponse); + } + }; + + /** Returns the object with the settings used for calls to createMigrationWorkflow. */ + public UnaryCallSettings + createMigrationWorkflowSettings() { + return createMigrationWorkflowSettings; + } + + /** Returns the object with the settings used for calls to getMigrationWorkflow. 
*/ + public UnaryCallSettings + getMigrationWorkflowSettings() { + return getMigrationWorkflowSettings; + } + + /** Returns the object with the settings used for calls to listMigrationWorkflows. */ + public PagedCallSettings< + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + ListMigrationWorkflowsPagedResponse> + listMigrationWorkflowsSettings() { + return listMigrationWorkflowsSettings; + } + + /** Returns the object with the settings used for calls to deleteMigrationWorkflow. */ + public UnaryCallSettings + deleteMigrationWorkflowSettings() { + return deleteMigrationWorkflowSettings; + } + + /** Returns the object with the settings used for calls to startMigrationWorkflow. */ + public UnaryCallSettings startMigrationWorkflowSettings() { + return startMigrationWorkflowSettings; + } + + /** Returns the object with the settings used for calls to getMigrationSubtask. */ + public UnaryCallSettings + getMigrationSubtaskSettings() { + return getMigrationSubtaskSettings; + } + + /** Returns the object with the settings used for calls to listMigrationSubtasks. */ + public PagedCallSettings< + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationSubtasksPagedResponse> + listMigrationSubtasksSettings() { + return listMigrationSubtasksSettings; + } + + @BetaApi("A restructuring of stub classes is planned, so this may break in the future") + public MigrationServiceStub createStub() throws IOException { + if (getTransportChannelProvider() + .getTransportName() + .equals(GrpcTransportChannel.getGrpcTransportName())) { + return GrpcMigrationServiceStub.create(this); + } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); + } + + /** Returns a builder for the default ExecutorProvider for this service. 
*/ + public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { + return InstantiatingExecutorProvider.newBuilder(); + } + + /** Returns the default service endpoint. */ + public static String getDefaultEndpoint() { + return "bigquerymigration.googleapis.com:443"; + } + + /** Returns the default mTLS service endpoint. */ + public static String getDefaultMtlsEndpoint() { + return "bigquerymigration.mtls.googleapis.com:443"; + } + + /** Returns the default service scopes. */ + public static List getDefaultServiceScopes() { + return DEFAULT_SERVICE_SCOPES; + } + + /** Returns a builder for the default credentials for this service. */ + public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { + return GoogleCredentialsProvider.newBuilder() + .setScopesToApply(DEFAULT_SERVICE_SCOPES) + .setUseJwtAccessWithScope(true); + } + + /** Returns a builder for the default ChannelProvider for this service. */ + public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { + return InstantiatingGrpcChannelProvider.newBuilder() + .setMaxInboundMessageSize(Integer.MAX_VALUE); + } + + public static TransportChannelProvider defaultTransportChannelProvider() { + return defaultGrpcTransportProviderBuilder().build(); + } + + @BetaApi("The surface for customizing headers is not stable yet and may change in the future.") + public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { + return ApiClientHeaderProvider.newBuilder() + .setGeneratedLibToken( + "gapic", GaxProperties.getLibraryVersion(MigrationServiceStubSettings.class)) + .setTransportToken( + GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); + } + + /** Returns a new builder for this class. */ + public static Builder newBuilder() { + return Builder.createDefault(); + } + + /** Returns a new builder for this class. 
*/ + public static Builder newBuilder(ClientContext clientContext) { + return new Builder(clientContext); + } + + /** Returns a builder containing all the values of this settings class. */ + public Builder toBuilder() { + return new Builder(this); + } + + protected MigrationServiceStubSettings(Builder settingsBuilder) throws IOException { + super(settingsBuilder); + + createMigrationWorkflowSettings = settingsBuilder.createMigrationWorkflowSettings().build(); + getMigrationWorkflowSettings = settingsBuilder.getMigrationWorkflowSettings().build(); + listMigrationWorkflowsSettings = settingsBuilder.listMigrationWorkflowsSettings().build(); + deleteMigrationWorkflowSettings = settingsBuilder.deleteMigrationWorkflowSettings().build(); + startMigrationWorkflowSettings = settingsBuilder.startMigrationWorkflowSettings().build(); + getMigrationSubtaskSettings = settingsBuilder.getMigrationSubtaskSettings().build(); + listMigrationSubtasksSettings = settingsBuilder.listMigrationSubtasksSettings().build(); + } + + /** Builder for MigrationServiceStubSettings. 
*/ + public static class Builder extends StubSettings.Builder { + private final ImmutableList> unaryMethodSettingsBuilders; + private final UnaryCallSettings.Builder + createMigrationWorkflowSettings; + private final UnaryCallSettings.Builder + getMigrationWorkflowSettings; + private final PagedCallSettings.Builder< + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + ListMigrationWorkflowsPagedResponse> + listMigrationWorkflowsSettings; + private final UnaryCallSettings.Builder + deleteMigrationWorkflowSettings; + private final UnaryCallSettings.Builder + startMigrationWorkflowSettings; + private final UnaryCallSettings.Builder + getMigrationSubtaskSettings; + private final PagedCallSettings.Builder< + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationSubtasksPagedResponse> + listMigrationSubtasksSettings; + private static final ImmutableMap> + RETRYABLE_CODE_DEFINITIONS; + + static { + ImmutableMap.Builder> definitions = + ImmutableMap.builder(); + definitions.put( + "no_retry_1_codes", ImmutableSet.copyOf(Lists.newArrayList())); + definitions.put( + "retry_policy_0_codes", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); + RETRYABLE_CODE_DEFINITIONS = definitions.build(); + } + + private static final ImmutableMap RETRY_PARAM_DEFINITIONS; + + static { + ImmutableMap.Builder definitions = ImmutableMap.builder(); + RetrySettings settings = null; + settings = + RetrySettings.newBuilder() + .setInitialRpcTimeout(Duration.ofMillis(60000L)) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ofMillis(60000L)) + .setTotalTimeout(Duration.ofMillis(60000L)) + .build(); + definitions.put("no_retry_1_params", settings); + settings = + RetrySettings.newBuilder() + .setInitialRetryDelay(Duration.ofMillis(1000L)) + .setRetryDelayMultiplier(1.3) + .setMaxRetryDelay(Duration.ofMillis(10000L)) + .setInitialRpcTimeout(Duration.ofMillis(120000L)) + .setRpcTimeoutMultiplier(1.0) + 
.setMaxRpcTimeout(Duration.ofMillis(120000L)) + .setTotalTimeout(Duration.ofMillis(120000L)) + .build(); + definitions.put("retry_policy_0_params", settings); + RETRY_PARAM_DEFINITIONS = definitions.build(); + } + + protected Builder() { + this(((ClientContext) null)); + } + + protected Builder(ClientContext clientContext) { + super(clientContext); + + createMigrationWorkflowSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + getMigrationWorkflowSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + listMigrationWorkflowsSettings = + PagedCallSettings.newBuilder(LIST_MIGRATION_WORKFLOWS_PAGE_STR_FACT); + deleteMigrationWorkflowSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + startMigrationWorkflowSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + getMigrationSubtaskSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + listMigrationSubtasksSettings = + PagedCallSettings.newBuilder(LIST_MIGRATION_SUBTASKS_PAGE_STR_FACT); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createMigrationWorkflowSettings, + getMigrationWorkflowSettings, + listMigrationWorkflowsSettings, + deleteMigrationWorkflowSettings, + startMigrationWorkflowSettings, + getMigrationSubtaskSettings, + listMigrationSubtasksSettings); + initDefaults(this); + } + + protected Builder(MigrationServiceStubSettings settings) { + super(settings); + + createMigrationWorkflowSettings = settings.createMigrationWorkflowSettings.toBuilder(); + getMigrationWorkflowSettings = settings.getMigrationWorkflowSettings.toBuilder(); + listMigrationWorkflowsSettings = settings.listMigrationWorkflowsSettings.toBuilder(); + deleteMigrationWorkflowSettings = settings.deleteMigrationWorkflowSettings.toBuilder(); + startMigrationWorkflowSettings = settings.startMigrationWorkflowSettings.toBuilder(); + getMigrationSubtaskSettings = settings.getMigrationSubtaskSettings.toBuilder(); + listMigrationSubtasksSettings = settings.listMigrationSubtasksSettings.toBuilder(); + + 
unaryMethodSettingsBuilders = + ImmutableList.>of( + createMigrationWorkflowSettings, + getMigrationWorkflowSettings, + listMigrationWorkflowsSettings, + deleteMigrationWorkflowSettings, + startMigrationWorkflowSettings, + getMigrationSubtaskSettings, + listMigrationSubtasksSettings); + } + + private static Builder createDefault() { + Builder builder = new Builder(((ClientContext) null)); + + builder.setTransportChannelProvider(defaultTransportChannelProvider()); + builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); + builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); + builder.setEndpoint(getDefaultEndpoint()); + builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); + builder.setSwitchToMtlsEndpointAllowed(true); + + return initDefaults(builder); + } + + private static Builder initDefaults(Builder builder) { + builder + .createMigrationWorkflowSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); + + builder + .getMigrationWorkflowSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .listMigrationWorkflowsSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .deleteMigrationWorkflowSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); + + builder + .startMigrationWorkflowSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .getMigrationSubtaskSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + 
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .listMigrationSubtasksSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + return builder; + } + + /** + * Applies the given settings updater function to all of the unary API methods in this service. + * + *

Note: This method does not support applying settings to streaming methods. + */ + public Builder applyToAllUnaryMethods( + ApiFunction, Void> settingsUpdater) { + super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); + return this; + } + + public ImmutableList> unaryMethodSettingsBuilders() { + return unaryMethodSettingsBuilders; + } + + /** Returns the builder for the settings used for calls to createMigrationWorkflow. */ + public UnaryCallSettings.Builder + createMigrationWorkflowSettings() { + return createMigrationWorkflowSettings; + } + + /** Returns the builder for the settings used for calls to getMigrationWorkflow. */ + public UnaryCallSettings.Builder + getMigrationWorkflowSettings() { + return getMigrationWorkflowSettings; + } + + /** Returns the builder for the settings used for calls to listMigrationWorkflows. */ + public PagedCallSettings.Builder< + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + ListMigrationWorkflowsPagedResponse> + listMigrationWorkflowsSettings() { + return listMigrationWorkflowsSettings; + } + + /** Returns the builder for the settings used for calls to deleteMigrationWorkflow. */ + public UnaryCallSettings.Builder + deleteMigrationWorkflowSettings() { + return deleteMigrationWorkflowSettings; + } + + /** Returns the builder for the settings used for calls to startMigrationWorkflow. */ + public UnaryCallSettings.Builder + startMigrationWorkflowSettings() { + return startMigrationWorkflowSettings; + } + + /** Returns the builder for the settings used for calls to getMigrationSubtask. */ + public UnaryCallSettings.Builder + getMigrationSubtaskSettings() { + return getMigrationSubtaskSettings; + } + + /** Returns the builder for the settings used for calls to listMigrationSubtasks. 
*/ + public PagedCallSettings.Builder< + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationSubtasksPagedResponse> + listMigrationSubtasksSettings() { + return listMigrationSubtasksSettings; + } + + @Override + public MigrationServiceStubSettings build() throws IOException { + return new MigrationServiceStubSettings(this); + } + } +} diff --git a/google-cloud-bigquerymigration/src/test/java/com/google/cloud/bigquery/migration/v2alpha/MigrationServiceClientTest.java b/google-cloud-bigquerymigration/src/test/java/com/google/cloud/bigquery/migration/v2alpha/MigrationServiceClientTest.java new file mode 100644 index 0000000..74ef9f2 --- /dev/null +++ b/google-cloud-bigquerymigration/src/test/java/com/google/cloud/bigquery/migration/v2alpha/MigrationServiceClientTest.java @@ -0,0 +1,687 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.migration.v2alpha; + +import static com.google.cloud.bigquery.migration.v2alpha.MigrationServiceClient.ListMigrationSubtasksPagedResponse; +import static com.google.cloud.bigquery.migration.v2alpha.MigrationServiceClient.ListMigrationWorkflowsPagedResponse; + +import com.google.api.gax.core.NoCredentialsProvider; +import com.google.api.gax.grpc.GaxGrpcProperties; +import com.google.api.gax.grpc.testing.LocalChannelProvider; +import com.google.api.gax.grpc.testing.MockGrpcService; +import com.google.api.gax.grpc.testing.MockServiceHelper; +import com.google.api.gax.rpc.ApiClientHeaderProvider; +import com.google.api.gax.rpc.InvalidArgumentException; +import com.google.common.collect.Lists; +import com.google.protobuf.AbstractMessage; +import com.google.protobuf.Empty; +import com.google.protobuf.Timestamp; +import com.google.rpc.ErrorInfo; +import io.grpc.StatusRuntimeException; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.UUID; +import javax.annotation.Generated; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +@Generated("by gapic-generator-java") +public class MigrationServiceClientTest { + private static MockMigrationService mockMigrationService; + private static MockServiceHelper mockServiceHelper; + private LocalChannelProvider channelProvider; + private MigrationServiceClient client; + + @BeforeClass + public static void startStaticServer() { + mockMigrationService = new MockMigrationService(); + mockServiceHelper = + new MockServiceHelper( + UUID.randomUUID().toString(), Arrays.asList(mockMigrationService)); + mockServiceHelper.start(); + } + + @AfterClass + public static void stopServer() { + mockServiceHelper.stop(); + } + + @Before + public void setUp() throws IOException { + 
mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); + MigrationServiceSettings settings = + MigrationServiceSettings.newBuilder() + .setTransportChannelProvider(channelProvider) + .setCredentialsProvider(NoCredentialsProvider.create()) + .build(); + client = MigrationServiceClient.create(settings); + } + + @After + public void tearDown() throws Exception { + client.close(); + } + + @Test + public void createMigrationWorkflowTest() throws Exception { + MigrationWorkflow expectedResponse = + MigrationWorkflow.newBuilder() + .setName(MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString()) + .setDisplayName("displayName1714148973") + .putAllTasks(new HashMap()) + .setCreateTime(Timestamp.newBuilder().build()) + .setLastUpdateTime(Timestamp.newBuilder().build()) + .build(); + mockMigrationService.addResponse(expectedResponse); + + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + MigrationWorkflow migrationWorkflow = MigrationWorkflow.newBuilder().build(); + + MigrationWorkflow actualResponse = client.createMigrationWorkflow(parent, migrationWorkflow); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateMigrationWorkflowRequest actualRequest = + ((CreateMigrationWorkflowRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(migrationWorkflow, actualRequest.getMigrationWorkflow()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createMigrationWorkflowExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + LocationName 
parent = LocationName.of("[PROJECT]", "[LOCATION]"); + MigrationWorkflow migrationWorkflow = MigrationWorkflow.newBuilder().build(); + client.createMigrationWorkflow(parent, migrationWorkflow); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createMigrationWorkflowTest2() throws Exception { + MigrationWorkflow expectedResponse = + MigrationWorkflow.newBuilder() + .setName(MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString()) + .setDisplayName("displayName1714148973") + .putAllTasks(new HashMap()) + .setCreateTime(Timestamp.newBuilder().build()) + .setLastUpdateTime(Timestamp.newBuilder().build()) + .build(); + mockMigrationService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + MigrationWorkflow migrationWorkflow = MigrationWorkflow.newBuilder().build(); + + MigrationWorkflow actualResponse = client.createMigrationWorkflow(parent, migrationWorkflow); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateMigrationWorkflowRequest actualRequest = + ((CreateMigrationWorkflowRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(migrationWorkflow, actualRequest.getMigrationWorkflow()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createMigrationWorkflowExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + String parent = "parent-995424086"; + MigrationWorkflow migrationWorkflow = MigrationWorkflow.newBuilder().build(); + client.createMigrationWorkflow(parent, 
migrationWorkflow); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void getMigrationWorkflowTest() throws Exception { + MigrationWorkflow expectedResponse = + MigrationWorkflow.newBuilder() + .setName(MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString()) + .setDisplayName("displayName1714148973") + .putAllTasks(new HashMap()) + .setCreateTime(Timestamp.newBuilder().build()) + .setLastUpdateTime(Timestamp.newBuilder().build()) + .build(); + mockMigrationService.addResponse(expectedResponse); + + MigrationWorkflowName name = MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]"); + + MigrationWorkflow actualResponse = client.getMigrationWorkflow(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetMigrationWorkflowRequest actualRequest = + ((GetMigrationWorkflowRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getMigrationWorkflowExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + MigrationWorkflowName name = + MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]"); + client.getMigrationWorkflow(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void getMigrationWorkflowTest2() throws Exception { + MigrationWorkflow expectedResponse = + MigrationWorkflow.newBuilder() + .setName(MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]").toString()) + .setDisplayName("displayName1714148973") + .putAllTasks(new HashMap()) + .setCreateTime(Timestamp.newBuilder().build()) + .setLastUpdateTime(Timestamp.newBuilder().build()) + .build(); + mockMigrationService.addResponse(expectedResponse); + + String name = "name3373707"; + + MigrationWorkflow actualResponse = client.getMigrationWorkflow(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetMigrationWorkflowRequest actualRequest = + ((GetMigrationWorkflowRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getMigrationWorkflowExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + String name = "name3373707"; + client.getMigrationWorkflow(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listMigrationWorkflowsTest() throws Exception { + MigrationWorkflow responsesElement = MigrationWorkflow.newBuilder().build(); + ListMigrationWorkflowsResponse expectedResponse = + ListMigrationWorkflowsResponse.newBuilder() + .setNextPageToken("") + .addAllMigrationWorkflows(Arrays.asList(responsesElement)) + .build(); + mockMigrationService.addResponse(expectedResponse); + + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + + ListMigrationWorkflowsPagedResponse pagedListResponse = client.listMigrationWorkflows(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getMigrationWorkflowsList().get(0), resources.get(0)); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListMigrationWorkflowsRequest actualRequest = + ((ListMigrationWorkflowsRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listMigrationWorkflowsExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + client.listMigrationWorkflows(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listMigrationWorkflowsTest2() throws Exception { + MigrationWorkflow responsesElement = MigrationWorkflow.newBuilder().build(); + ListMigrationWorkflowsResponse expectedResponse = + ListMigrationWorkflowsResponse.newBuilder() + .setNextPageToken("") + .addAllMigrationWorkflows(Arrays.asList(responsesElement)) + .build(); + mockMigrationService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + + ListMigrationWorkflowsPagedResponse pagedListResponse = client.listMigrationWorkflows(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getMigrationWorkflowsList().get(0), resources.get(0)); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListMigrationWorkflowsRequest actualRequest = + ((ListMigrationWorkflowsRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listMigrationWorkflowsExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + String parent = "parent-995424086"; + client.listMigrationWorkflows(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void deleteMigrationWorkflowTest() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + mockMigrationService.addResponse(expectedResponse); + + MigrationWorkflowName name = MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]"); + + client.deleteMigrationWorkflow(name); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteMigrationWorkflowRequest actualRequest = + ((DeleteMigrationWorkflowRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteMigrationWorkflowExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + MigrationWorkflowName name = + MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]"); + client.deleteMigrationWorkflow(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void deleteMigrationWorkflowTest2() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + mockMigrationService.addResponse(expectedResponse); + + String name = "name3373707"; + + client.deleteMigrationWorkflow(name); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteMigrationWorkflowRequest actualRequest = + ((DeleteMigrationWorkflowRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteMigrationWorkflowExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + String name = "name3373707"; + client.deleteMigrationWorkflow(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void startMigrationWorkflowTest() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + mockMigrationService.addResponse(expectedResponse); + + MigrationWorkflowName name = MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]"); + + client.startMigrationWorkflow(name); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + StartMigrationWorkflowRequest actualRequest = + ((StartMigrationWorkflowRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void startMigrationWorkflowExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + MigrationWorkflowName name = + MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]"); + client.startMigrationWorkflow(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void startMigrationWorkflowTest2() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + mockMigrationService.addResponse(expectedResponse); + + String name = "name3373707"; + + client.startMigrationWorkflow(name); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + StartMigrationWorkflowRequest actualRequest = + ((StartMigrationWorkflowRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void startMigrationWorkflowExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + String name = "name3373707"; + client.startMigrationWorkflow(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void getMigrationSubtaskTest() throws Exception { + MigrationSubtask expectedResponse = + MigrationSubtask.newBuilder() + .setName( + MigrationSubtaskName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]", "[SUBTASK]") + .toString()) + .setTaskId("taskId-880873088") + .setType("type3575610") + .setProcessingError(ErrorInfo.newBuilder().build()) + .addAllResourceErrorDetails(new ArrayList()) + .setResourceErrorCount(-929997465) + .setCreateTime(Timestamp.newBuilder().build()) + .setLastUpdateTime(Timestamp.newBuilder().build()) + .addAllMetrics(new ArrayList()) + .build(); + mockMigrationService.addResponse(expectedResponse); + + MigrationSubtaskName name = + MigrationSubtaskName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]", "[SUBTASK]"); + + MigrationSubtask actualResponse = client.getMigrationSubtask(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetMigrationSubtaskRequest actualRequest = ((GetMigrationSubtaskRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getMigrationSubtaskExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + MigrationSubtaskName name = + MigrationSubtaskName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]", "[SUBTASK]"); + client.getMigrationSubtask(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void getMigrationSubtaskTest2() throws Exception { + MigrationSubtask expectedResponse = + MigrationSubtask.newBuilder() + .setName( + MigrationSubtaskName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]", "[SUBTASK]") + .toString()) + .setTaskId("taskId-880873088") + .setType("type3575610") + .setProcessingError(ErrorInfo.newBuilder().build()) + .addAllResourceErrorDetails(new ArrayList()) + .setResourceErrorCount(-929997465) + .setCreateTime(Timestamp.newBuilder().build()) + .setLastUpdateTime(Timestamp.newBuilder().build()) + .addAllMetrics(new ArrayList()) + .build(); + mockMigrationService.addResponse(expectedResponse); + + String name = "name3373707"; + + MigrationSubtask actualResponse = client.getMigrationSubtask(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetMigrationSubtaskRequest actualRequest = ((GetMigrationSubtaskRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getMigrationSubtaskExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + String name = "name3373707"; + client.getMigrationSubtask(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listMigrationSubtasksTest() throws Exception { + MigrationSubtask responsesElement = MigrationSubtask.newBuilder().build(); + ListMigrationSubtasksResponse expectedResponse = + ListMigrationSubtasksResponse.newBuilder() + .setNextPageToken("") + .addAllMigrationSubtasks(Arrays.asList(responsesElement)) + .build(); + mockMigrationService.addResponse(expectedResponse); + + MigrationWorkflowName parent = + MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]"); + + ListMigrationSubtasksPagedResponse pagedListResponse = client.listMigrationSubtasks(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getMigrationSubtasksList().get(0), resources.get(0)); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListMigrationSubtasksRequest actualRequest = + ((ListMigrationSubtasksRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listMigrationSubtasksExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + MigrationWorkflowName parent = + MigrationWorkflowName.of("[PROJECT]", "[LOCATION]", "[WORKFLOW]"); + client.listMigrationSubtasks(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listMigrationSubtasksTest2() throws Exception { + MigrationSubtask responsesElement = MigrationSubtask.newBuilder().build(); + ListMigrationSubtasksResponse expectedResponse = + ListMigrationSubtasksResponse.newBuilder() + .setNextPageToken("") + .addAllMigrationSubtasks(Arrays.asList(responsesElement)) + .build(); + mockMigrationService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + + ListMigrationSubtasksPagedResponse pagedListResponse = client.listMigrationSubtasks(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getMigrationSubtasksList().get(0), resources.get(0)); + + List actualRequests = mockMigrationService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListMigrationSubtasksRequest actualRequest = + ((ListMigrationSubtasksRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listMigrationSubtasksExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMigrationService.addException(exception); + + try { + String parent = "parent-995424086"; + client.listMigrationSubtasks(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } +} diff --git a/google-cloud-bigquerymigration/src/test/java/com/google/cloud/bigquery/migration/v2alpha/MockMigrationService.java b/google-cloud-bigquerymigration/src/test/java/com/google/cloud/bigquery/migration/v2alpha/MockMigrationService.java new file mode 100644 index 0000000..a8d1c1e --- /dev/null +++ b/google-cloud-bigquerymigration/src/test/java/com/google/cloud/bigquery/migration/v2alpha/MockMigrationService.java @@ -0,0 +1,59 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.migration.v2alpha; + +import com.google.api.core.BetaApi; +import com.google.api.gax.grpc.testing.MockGrpcService; +import com.google.protobuf.AbstractMessage; +import io.grpc.ServerServiceDefinition; +import java.util.List; +import javax.annotation.Generated; + +@BetaApi +@Generated("by gapic-generator-java") +public class MockMigrationService implements MockGrpcService { + private final MockMigrationServiceImpl serviceImpl; + + public MockMigrationService() { + serviceImpl = new MockMigrationServiceImpl(); + } + + @Override + public List getRequests() { + return serviceImpl.getRequests(); + } + + @Override + public void addResponse(AbstractMessage response) { + serviceImpl.addResponse(response); + } + + @Override + public void addException(Exception exception) { + serviceImpl.addException(exception); + } + + @Override + public ServerServiceDefinition getServiceDefinition() { + return serviceImpl.bindService(); + } + + @Override + public void reset() { + serviceImpl.reset(); + } +} diff --git a/google-cloud-bigquerymigration/src/test/java/com/google/cloud/bigquery/migration/v2alpha/MockMigrationServiceImpl.java b/google-cloud-bigquerymigration/src/test/java/com/google/cloud/bigquery/migration/v2alpha/MockMigrationServiceImpl.java new file mode 100644 index 0000000..6aa062c --- /dev/null +++ b/google-cloud-bigquerymigration/src/test/java/com/google/cloud/bigquery/migration/v2alpha/MockMigrationServiceImpl.java @@ -0,0 +1,210 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.migration.v2alpha; + +import com.google.api.core.BetaApi; +import com.google.cloud.bigquery.migration.v2alpha.MigrationServiceGrpc.MigrationServiceImplBase; +import com.google.protobuf.AbstractMessage; +import com.google.protobuf.Empty; +import io.grpc.stub.StreamObserver; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.Queue; +import javax.annotation.Generated; + +@BetaApi +@Generated("by gapic-generator-java") +public class MockMigrationServiceImpl extends MigrationServiceImplBase { + private List requests; + private Queue responses; + + public MockMigrationServiceImpl() { + requests = new ArrayList<>(); + responses = new LinkedList<>(); + } + + public List getRequests() { + return requests; + } + + public void addResponse(AbstractMessage response) { + responses.add(response); + } + + public void setResponses(List responses) { + this.responses = new LinkedList(responses); + } + + public void addException(Exception exception) { + responses.add(exception); + } + + public void reset() { + requests = new ArrayList<>(); + responses = new LinkedList<>(); + } + + @Override + public void createMigrationWorkflow( + CreateMigrationWorkflowRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof MigrationWorkflow) { + requests.add(request); + responseObserver.onNext(((MigrationWorkflow) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateMigrationWorkflow, expected %s or %s", + response == null ? 
"null" : response.getClass().getName(), + MigrationWorkflow.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void getMigrationWorkflow( + GetMigrationWorkflowRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof MigrationWorkflow) { + requests.add(request); + responseObserver.onNext(((MigrationWorkflow) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetMigrationWorkflow, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + MigrationWorkflow.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void listMigrationWorkflows( + ListMigrationWorkflowsRequest request, + StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof ListMigrationWorkflowsResponse) { + requests.add(request); + responseObserver.onNext(((ListMigrationWorkflowsResponse) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListMigrationWorkflows, expected %s or %s", + response == null ? 
"null" : response.getClass().getName(), + ListMigrationWorkflowsResponse.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void deleteMigrationWorkflow( + DeleteMigrationWorkflowRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof Empty) { + requests.add(request); + responseObserver.onNext(((Empty) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteMigrationWorkflow, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + Empty.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void startMigrationWorkflow( + StartMigrationWorkflowRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof Empty) { + requests.add(request); + responseObserver.onNext(((Empty) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method StartMigrationWorkflow, expected %s or %s", + response == null ? 
"null" : response.getClass().getName(), + Empty.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void getMigrationSubtask( + GetMigrationSubtaskRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof MigrationSubtask) { + requests.add(request); + responseObserver.onNext(((MigrationSubtask) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetMigrationSubtask, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + MigrationSubtask.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void listMigrationSubtasks( + ListMigrationSubtasksRequest request, + StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof ListMigrationSubtasksResponse) { + requests.add(request); + responseObserver.onNext(((ListMigrationSubtasksResponse) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListMigrationSubtasks, expected %s or %s", + response == null ? 
"null" : response.getClass().getName(), + ListMigrationSubtasksResponse.class.getName(), + Exception.class.getName()))); + } + } +} diff --git a/grpc-google-cloud-bigquerymigration-v2alpha/pom.xml b/grpc-google-cloud-bigquerymigration-v2alpha/pom.xml new file mode 100644 index 0000000..a268bd7 --- /dev/null +++ b/grpc-google-cloud-bigquerymigration-v2alpha/pom.xml @@ -0,0 +1,69 @@ + + 4.0.0 + com.google.api.grpc + grpc-google-cloud-bigquerymigration-v2alpha + 0.0.1-SNAPSHOT + grpc-google-cloud-bigquerymigration-v2alpha + GRPC library for google-cloud-bigquerymigration + + com.google.cloud + google-cloud-bigquerymigration-parent + 0.0.1-SNAPSHOT + + + + io.grpc + grpc-api + + + io.grpc + grpc-stub + + + io.grpc + grpc-protobuf + + + com.google.protobuf + protobuf-java + + + com.google.api.grpc + proto-google-common-protos + + + com.google.api.grpc + proto-google-cloud-bigquerymigration-v2alpha + + + com.google.guava + guava + + + + + + java9 + + [9,) + + + + javax.annotation + javax.annotation-api + + + + + + + + + org.codehaus.mojo + flatten-maven-plugin + + + + \ No newline at end of file diff --git a/grpc-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationServiceGrpc.java b/grpc-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationServiceGrpc.java new file mode 100644 index 0000000..851c5fd --- /dev/null +++ b/grpc-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationServiceGrpc.java @@ -0,0 +1,1135 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery.migration.v2alpha; + +import static io.grpc.MethodDescriptor.generateFullMethodName; + +/** + * + * + *
+ * Service to handle EDW migrations.
+ * 
+ */ +@javax.annotation.Generated( + value = "by gRPC proto compiler", + comments = "Source: google/cloud/bigquery/migration/v2alpha/migration_service.proto") +public final class MigrationServiceGrpc { + + private MigrationServiceGrpc() {} + + public static final String SERVICE_NAME = + "google.cloud.bigquery.migration.v2alpha.MigrationService"; + + // Static method descriptors that strictly reflect the proto. + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow> + getCreateMigrationWorkflowMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "CreateMigrationWorkflow", + requestType = + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest.class, + responseType = com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow> + getCreateMigrationWorkflowMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow> + getCreateMigrationWorkflowMethod; + if ((getCreateMigrationWorkflowMethod = MigrationServiceGrpc.getCreateMigrationWorkflowMethod) + == null) { + synchronized (MigrationServiceGrpc.class) { + if ((getCreateMigrationWorkflowMethod = + MigrationServiceGrpc.getCreateMigrationWorkflowMethod) + == null) { + MigrationServiceGrpc.getCreateMigrationWorkflowMethod = + getCreateMigrationWorkflowMethod = + io.grpc.MethodDescriptor + . 
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + generateFullMethodName(SERVICE_NAME, "CreateMigrationWorkflow")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.migration.v2alpha + .CreateMigrationWorkflowRequest.getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow + .getDefaultInstance())) + .setSchemaDescriptor( + new MigrationServiceMethodDescriptorSupplier("CreateMigrationWorkflow")) + .build(); + } + } + } + return getCreateMigrationWorkflowMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow> + getGetMigrationWorkflowMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "GetMigrationWorkflow", + requestType = com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest.class, + responseType = com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow> + getGetMigrationWorkflowMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow> + getGetMigrationWorkflowMethod; + if ((getGetMigrationWorkflowMethod = MigrationServiceGrpc.getGetMigrationWorkflowMethod) + == null) { + synchronized (MigrationServiceGrpc.class) { + if ((getGetMigrationWorkflowMethod = MigrationServiceGrpc.getGetMigrationWorkflowMethod) + == null) { + MigrationServiceGrpc.getGetMigrationWorkflowMethod = + 
getGetMigrationWorkflowMethod = + io.grpc.MethodDescriptor + . + newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + generateFullMethodName(SERVICE_NAME, "GetMigrationWorkflow")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.migration.v2alpha + .GetMigrationWorkflowRequest.getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow + .getDefaultInstance())) + .setSchemaDescriptor( + new MigrationServiceMethodDescriptorSupplier("GetMigrationWorkflow")) + .build(); + } + } + } + return getGetMigrationWorkflowMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse> + getListMigrationWorkflowsMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "ListMigrationWorkflows", + requestType = com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest.class, + responseType = + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse> + getListMigrationWorkflowsMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse> + getListMigrationWorkflowsMethod; + if ((getListMigrationWorkflowsMethod = MigrationServiceGrpc.getListMigrationWorkflowsMethod) + == null) { + synchronized (MigrationServiceGrpc.class) { + if ((getListMigrationWorkflowsMethod = 
MigrationServiceGrpc.getListMigrationWorkflowsMethod) + == null) { + MigrationServiceGrpc.getListMigrationWorkflowsMethod = + getListMigrationWorkflowsMethod = + io.grpc.MethodDescriptor + . + newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + generateFullMethodName(SERVICE_NAME, "ListMigrationWorkflows")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.migration.v2alpha + .ListMigrationWorkflowsRequest.getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.migration.v2alpha + .ListMigrationWorkflowsResponse.getDefaultInstance())) + .setSchemaDescriptor( + new MigrationServiceMethodDescriptorSupplier("ListMigrationWorkflows")) + .build(); + } + } + } + return getListMigrationWorkflowsMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest, + com.google.protobuf.Empty> + getDeleteMigrationWorkflowMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "DeleteMigrationWorkflow", + requestType = + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest.class, + responseType = com.google.protobuf.Empty.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest, + com.google.protobuf.Empty> + getDeleteMigrationWorkflowMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest, + com.google.protobuf.Empty> + getDeleteMigrationWorkflowMethod; + if ((getDeleteMigrationWorkflowMethod = MigrationServiceGrpc.getDeleteMigrationWorkflowMethod) + == null) { + synchronized (MigrationServiceGrpc.class) { + if ((getDeleteMigrationWorkflowMethod = + 
MigrationServiceGrpc.getDeleteMigrationWorkflowMethod) + == null) { + MigrationServiceGrpc.getDeleteMigrationWorkflowMethod = + getDeleteMigrationWorkflowMethod = + io.grpc.MethodDescriptor + . + newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + generateFullMethodName(SERVICE_NAME, "DeleteMigrationWorkflow")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.migration.v2alpha + .DeleteMigrationWorkflowRequest.getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.protobuf.Empty.getDefaultInstance())) + .setSchemaDescriptor( + new MigrationServiceMethodDescriptorSupplier("DeleteMigrationWorkflow")) + .build(); + } + } + } + return getDeleteMigrationWorkflowMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest, + com.google.protobuf.Empty> + getStartMigrationWorkflowMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "StartMigrationWorkflow", + requestType = com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest.class, + responseType = com.google.protobuf.Empty.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest, + com.google.protobuf.Empty> + getStartMigrationWorkflowMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest, + com.google.protobuf.Empty> + getStartMigrationWorkflowMethod; + if ((getStartMigrationWorkflowMethod = MigrationServiceGrpc.getStartMigrationWorkflowMethod) + == null) { + synchronized (MigrationServiceGrpc.class) { + if ((getStartMigrationWorkflowMethod = MigrationServiceGrpc.getStartMigrationWorkflowMethod) + == null) { + 
MigrationServiceGrpc.getStartMigrationWorkflowMethod = + getStartMigrationWorkflowMethod = + io.grpc.MethodDescriptor + . + newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + generateFullMethodName(SERVICE_NAME, "StartMigrationWorkflow")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.migration.v2alpha + .StartMigrationWorkflowRequest.getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.protobuf.Empty.getDefaultInstance())) + .setSchemaDescriptor( + new MigrationServiceMethodDescriptorSupplier("StartMigrationWorkflow")) + .build(); + } + } + } + return getStartMigrationWorkflowMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask> + getGetMigrationSubtaskMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "GetMigrationSubtask", + requestType = com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest.class, + responseType = com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask> + getGetMigrationSubtaskMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask> + getGetMigrationSubtaskMethod; + if ((getGetMigrationSubtaskMethod = MigrationServiceGrpc.getGetMigrationSubtaskMethod) + == null) { + synchronized (MigrationServiceGrpc.class) { + if ((getGetMigrationSubtaskMethod = MigrationServiceGrpc.getGetMigrationSubtaskMethod) + == null) { + 
MigrationServiceGrpc.getGetMigrationSubtaskMethod = + getGetMigrationSubtaskMethod = + io.grpc.MethodDescriptor + . + newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + generateFullMethodName(SERVICE_NAME, "GetMigrationSubtask")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask + .getDefaultInstance())) + .setSchemaDescriptor( + new MigrationServiceMethodDescriptorSupplier("GetMigrationSubtask")) + .build(); + } + } + } + return getGetMigrationSubtaskMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse> + getListMigrationSubtasksMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "ListMigrationSubtasks", + requestType = com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest.class, + responseType = + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse> + getListMigrationSubtasksMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse> + getListMigrationSubtasksMethod; + if ((getListMigrationSubtasksMethod = MigrationServiceGrpc.getListMigrationSubtasksMethod) + == null) { + synchronized (MigrationServiceGrpc.class) { + if 
((getListMigrationSubtasksMethod = MigrationServiceGrpc.getListMigrationSubtasksMethod) + == null) { + MigrationServiceGrpc.getListMigrationSubtasksMethod = + getListMigrationSubtasksMethod = + io.grpc.MethodDescriptor + . + newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + generateFullMethodName(SERVICE_NAME, "ListMigrationSubtasks")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.migration.v2alpha + .ListMigrationSubtasksRequest.getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.migration.v2alpha + .ListMigrationSubtasksResponse.getDefaultInstance())) + .setSchemaDescriptor( + new MigrationServiceMethodDescriptorSupplier("ListMigrationSubtasks")) + .build(); + } + } + } + return getListMigrationSubtasksMethod; + } + + /** Creates a new async stub that supports all call types for the service */ + public static MigrationServiceStub newStub(io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @java.lang.Override + public MigrationServiceStub newStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new MigrationServiceStub(channel, callOptions); + } + }; + return MigrationServiceStub.newStub(factory, channel); + } + + /** + * Creates a new blocking-style stub that supports unary and streaming output calls on the service + */ + public static MigrationServiceBlockingStub newBlockingStub(io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @java.lang.Override + public MigrationServiceBlockingStub newStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new MigrationServiceBlockingStub(channel, callOptions); + } + }; + return MigrationServiceBlockingStub.newStub(factory, channel); + } + + /** 
Creates a new ListenableFuture-style stub that supports unary calls on the service */ + public static MigrationServiceFutureStub newFutureStub(io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @java.lang.Override + public MigrationServiceFutureStub newStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new MigrationServiceFutureStub(channel, callOptions); + } + }; + return MigrationServiceFutureStub.newStub(factory, channel); + } + + /** + * + * + *
+   * Service to handle EDW migrations.
+   * 
+ */ + public abstract static class MigrationServiceImplBase implements io.grpc.BindableService { + + /** + * + * + *
+     * Creates a migration workflow.
+     * 
+ */ + public void createMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getCreateMigrationWorkflowMethod(), responseObserver); + } + + /** + * + * + *
+     * Gets a previously created migration workflow.
+     * 
+ */ + public void getMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getGetMigrationWorkflowMethod(), responseObserver); + } + + /** + * + * + *
+     * Lists previously created migration workflow.
+     * 
+ */ + public void listMigrationWorkflows( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest request, + io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse> + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getListMigrationWorkflowsMethod(), responseObserver); + } + + /** + * + * + *
+     * Deletes a migration workflow by name.
+     * 
+ */ + public void deleteMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getDeleteMigrationWorkflowMethod(), responseObserver); + } + + /** + * + * + *
+     * Starts a previously created migration workflow. I.e., the state transitions
+     * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING.
+     * An error will be signaled if the state is anything other than DRAFT or
+     * RUNNING.
+     * 
+ */ + public void startMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getStartMigrationWorkflowMethod(), responseObserver); + } + + /** + * + * + *
+     * Gets a previously created migration subtask.
+     * 
+ */ + public void getMigrationSubtask( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getGetMigrationSubtaskMethod(), responseObserver); + } + + /** + * + * + *
+     * Lists previously created migration subtasks.
+     * 
+ */ + public void listMigrationSubtasks( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest request, + io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse> + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getListMigrationSubtasksMethod(), responseObserver); + } + + @java.lang.Override + public final io.grpc.ServerServiceDefinition bindService() { + return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) + .addMethod( + getCreateMigrationWorkflowMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow>( + this, METHODID_CREATE_MIGRATION_WORKFLOW))) + .addMethod( + getGetMigrationWorkflowMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow>( + this, METHODID_GET_MIGRATION_WORKFLOW))) + .addMethod( + getListMigrationWorkflowsMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse>( + this, METHODID_LIST_MIGRATION_WORKFLOWS))) + .addMethod( + getDeleteMigrationWorkflowMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest, + com.google.protobuf.Empty>(this, METHODID_DELETE_MIGRATION_WORKFLOW))) + .addMethod( + getStartMigrationWorkflowMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest, + com.google.protobuf.Empty>(this, METHODID_START_MIGRATION_WORKFLOW))) + .addMethod( 
+ getGetMigrationSubtaskMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask>( + this, METHODID_GET_MIGRATION_SUBTASK))) + .addMethod( + getListMigrationSubtasksMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse>( + this, METHODID_LIST_MIGRATION_SUBTASKS))) + .build(); + } + } + + /** + * + * + *
+   * Service to handle EDW migrations.
+   * 
+ */ + public static final class MigrationServiceStub + extends io.grpc.stub.AbstractAsyncStub { + private MigrationServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected MigrationServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new MigrationServiceStub(channel, callOptions); + } + + /** + * + * + *
+     * Creates a migration workflow.
+     * 
+ */ + public void createMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getCreateMigrationWorkflowMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Gets a previously created migration workflow.
+     * 
+ */ + public void getMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getGetMigrationWorkflowMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Lists previously created migration workflow.
+     * 
+ */ + public void listMigrationWorkflows( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest request, + io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse> + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getListMigrationWorkflowsMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Deletes a migration workflow by name.
+     * 
+ */ + public void deleteMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getDeleteMigrationWorkflowMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Starts a previously created migration workflow. I.e., the state transitions
+     * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING.
+     * An error will be signaled if the state is anything other than DRAFT or
+     * RUNNING.
+     * 
+ */ + public void startMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getStartMigrationWorkflowMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Gets a previously created migration subtask.
+     * 
+ */ + public void getMigrationSubtask( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getGetMigrationSubtaskMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Lists previously created migration subtasks.
+     * 
+ */ + public void listMigrationSubtasks( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest request, + io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse> + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getListMigrationSubtasksMethod(), getCallOptions()), + request, + responseObserver); + } + } + + /** + * + * + *
+   * Service to handle EDW migrations.
+   * 
+ */ + public static final class MigrationServiceBlockingStub + extends io.grpc.stub.AbstractBlockingStub { + private MigrationServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected MigrationServiceBlockingStub build( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new MigrationServiceBlockingStub(channel, callOptions); + } + + /** + * + * + *
+     * Creates a migration workflow.
+     * 
+ */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow createMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getCreateMigrationWorkflowMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Gets a previously created migration workflow.
+     * 
+ */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow getMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getGetMigrationWorkflowMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Lists previously created migration workflow.
+     * 
+ */ + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + listMigrationWorkflows( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getListMigrationWorkflowsMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Deletes a migration workflow by name.
+     * 
+ */ + public com.google.protobuf.Empty deleteMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getDeleteMigrationWorkflowMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Starts a previously created migration workflow. I.e., the state transitions
+     * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING.
+     * An error will be signaled if the state is anything other than DRAFT or
+     * RUNNING.
+     * 
+ */ + public com.google.protobuf.Empty startMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getStartMigrationWorkflowMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Gets a previously created migration subtask.
+     * 
+ */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask getMigrationSubtask( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getGetMigrationSubtaskMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Lists previously created migration subtasks.
+     * 
+ */ + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse + listMigrationSubtasks( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getListMigrationSubtasksMethod(), getCallOptions(), request); + } + } + + /** + * + * + *
+   * Service to handle EDW migrations.
+   * 
+ */ + public static final class MigrationServiceFutureStub + extends io.grpc.stub.AbstractFutureStub { + private MigrationServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected MigrationServiceFutureStub build( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new MigrationServiceFutureStub(channel, callOptions); + } + + /** + * + * + *
+     * Creates a migration workflow.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow> + createMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getCreateMigrationWorkflowMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Gets a previously created migration workflow.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow> + getMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getGetMigrationWorkflowMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Lists previously created migration workflow.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse> + listMigrationWorkflows( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getListMigrationWorkflowsMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Deletes a migration workflow by name.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture + deleteMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getDeleteMigrationWorkflowMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Starts a previously created migration workflow. I.e., the state transitions
+     * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING.
+     * An error will be signaled if the state is anything other than DRAFT or
+     * RUNNING.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture + startMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getStartMigrationWorkflowMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Gets a previously created migration subtask.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask> + getMigrationSubtask( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getGetMigrationSubtaskMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Lists previously created migration subtasks.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse> + listMigrationSubtasks( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getListMigrationSubtasksMethod(), getCallOptions()), request); + } + } + + private static final int METHODID_CREATE_MIGRATION_WORKFLOW = 0; + private static final int METHODID_GET_MIGRATION_WORKFLOW = 1; + private static final int METHODID_LIST_MIGRATION_WORKFLOWS = 2; + private static final int METHODID_DELETE_MIGRATION_WORKFLOW = 3; + private static final int METHODID_START_MIGRATION_WORKFLOW = 4; + private static final int METHODID_GET_MIGRATION_SUBTASK = 5; + private static final int METHODID_LIST_MIGRATION_SUBTASKS = 6; + + private static final class MethodHandlers + implements io.grpc.stub.ServerCalls.UnaryMethod, + io.grpc.stub.ServerCalls.ServerStreamingMethod, + io.grpc.stub.ServerCalls.ClientStreamingMethod, + io.grpc.stub.ServerCalls.BidiStreamingMethod { + private final MigrationServiceImplBase serviceImpl; + private final int methodId; + + MethodHandlers(MigrationServiceImplBase serviceImpl, int methodId) { + this.serviceImpl = serviceImpl; + this.methodId = methodId; + } + + @java.lang.Override + @java.lang.SuppressWarnings("unchecked") + public void invoke(Req request, io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + case METHODID_CREATE_MIGRATION_WORKFLOW: + serviceImpl.createMigrationWorkflow( + (com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest) request, + (io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow>) + responseObserver); + break; + case METHODID_GET_MIGRATION_WORKFLOW: + serviceImpl.getMigrationWorkflow( + (com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest) request, + (io.grpc.stub.StreamObserver< + 
com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow>) + responseObserver); + break; + case METHODID_LIST_MIGRATION_WORKFLOWS: + serviceImpl.listMigrationWorkflows( + (com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest) request, + (io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse>) + responseObserver); + break; + case METHODID_DELETE_MIGRATION_WORKFLOW: + serviceImpl.deleteMigrationWorkflow( + (com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_START_MIGRATION_WORKFLOW: + serviceImpl.startMigrationWorkflow( + (com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_GET_MIGRATION_SUBTASK: + serviceImpl.getMigrationSubtask( + (com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest) request, + (io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask>) + responseObserver); + break; + case METHODID_LIST_MIGRATION_SUBTASKS: + serviceImpl.listMigrationSubtasks( + (com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest) request, + (io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse>) + responseObserver); + break; + default: + throw new AssertionError(); + } + } + + @java.lang.Override + @java.lang.SuppressWarnings("unchecked") + public io.grpc.stub.StreamObserver invoke( + io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + default: + throw new AssertionError(); + } + } + } + + private abstract static class MigrationServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoFileDescriptorSupplier, + io.grpc.protobuf.ProtoServiceDescriptorSupplier { + MigrationServiceBaseDescriptorSupplier() {} + + @java.lang.Override + public 
com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto.getDescriptor(); + } + + @java.lang.Override + public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { + return getFileDescriptor().findServiceByName("MigrationService"); + } + } + + private static final class MigrationServiceFileDescriptorSupplier + extends MigrationServiceBaseDescriptorSupplier { + MigrationServiceFileDescriptorSupplier() {} + } + + private static final class MigrationServiceMethodDescriptorSupplier + extends MigrationServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { + private final String methodName; + + MigrationServiceMethodDescriptorSupplier(String methodName) { + this.methodName = methodName; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { + return getServiceDescriptor().findMethodByName(methodName); + } + } + + private static volatile io.grpc.ServiceDescriptor serviceDescriptor; + + public static io.grpc.ServiceDescriptor getServiceDescriptor() { + io.grpc.ServiceDescriptor result = serviceDescriptor; + if (result == null) { + synchronized (MigrationServiceGrpc.class) { + result = serviceDescriptor; + if (result == null) { + serviceDescriptor = + result = + io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) + .setSchemaDescriptor(new MigrationServiceFileDescriptorSupplier()) + .addMethod(getCreateMigrationWorkflowMethod()) + .addMethod(getGetMigrationWorkflowMethod()) + .addMethod(getListMigrationWorkflowsMethod()) + .addMethod(getDeleteMigrationWorkflowMethod()) + .addMethod(getStartMigrationWorkflowMethod()) + .addMethod(getGetMigrationSubtaskMethod()) + .addMethod(getListMigrationSubtasksMethod()) + .build(); + } + } + } + return result; + } +} diff --git a/java.header b/java.header new file mode 100644 index 0000000..d0970ba --- /dev/null +++ b/java.header @@ -0,0 
+1,15 @@ +^/\*$ +^ \* Copyright \d\d\d\d,? Google (Inc\.|LLC)$ +^ \*$ +^ \* Licensed under the Apache License, Version 2\.0 \(the "License"\);$ +^ \* you may not use this file except in compliance with the License\.$ +^ \* You may obtain a copy of the License at$ +^ \*$ +^ \*[ ]+https?://www.apache.org/licenses/LICENSE-2\.0$ +^ \*$ +^ \* Unless required by applicable law or agreed to in writing, software$ +^ \* distributed under the License is distributed on an "AS IS" BASIS,$ +^ \* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied\.$ +^ \* See the License for the specific language governing permissions and$ +^ \* limitations under the License\.$ +^ \*/$ diff --git a/license-checks.xml b/license-checks.xml new file mode 100644 index 0000000..6597fce --- /dev/null +++ b/license-checks.xml @@ -0,0 +1,10 @@ + + + + + + + + diff --git a/owlbot.py b/owlbot.py new file mode 100644 index 0000000..7724346 --- /dev/null +++ b/owlbot.py @@ -0,0 +1,24 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import synthtool as s +from synthtool.languages import java + + +for library in s.get_staging_dirs(): + # put any special-case replacements here + s.move(library) + +s.remove_staging_dirs() +java.common_templates() \ No newline at end of file diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..3dcd263 --- /dev/null +++ b/pom.xml @@ -0,0 +1,188 @@ + + + 4.0.0 + com.google.cloud + google-cloud-bigquerymigration-parent + pom + 0.0.1-SNAPSHOT + Google BigQuery Migration Parent + https://github.com/googleapis/java-bigquerymigration + + Java idiomatic client for Google Cloud Platform services. + + + + com.google.cloud + google-cloud-shared-config + 1.0.0 + + + + + chingor + Jeff Ching + chingor@google.com + Google LLC + + Developer + + + + neenushaji + Neenu Shaji + neenushaji@google.com + Google LLC + + Developer + + + + + Google LLC + + + scm:git:git@github.com:googleapis/java-bigquerymigration.git + scm:git:git@github.com:googleapis/java-bigquerymigration.git + https://github.com/googleapis/java-bigquerymigration + HEAD + + + https://github.com/googleapis/java-bigquerymigration/issues + GitHub Issues + + + + Apache-2.0 + https://www.apache.org/licenses/LICENSE-2.0.txt + + + + + UTF-8 + UTF-8 + github + google-cloud-bigquerymigration-parent + + + + + + com.google.cloud + google-cloud-bigquerymigration + 0.0.1-SNAPSHOT + + + com.google.api.grpc + grpc-google-cloud-bigquerymigration-v2alpha + 0.0.1-SNAPSHOT + + + com.google.api.grpc + proto-google-cloud-bigquerymigration-v2alpha + 0.0.1-SNAPSHOT + + + + com.google.cloud + google-cloud-shared-dependencies + 2.1.0 + pom + import + + + + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + org.objenesis:objenesis + javax.annotation:javax.annotation-api + + + + + + + + + google-cloud-bigquerymigration + grpc-google-cloud-bigquerymigration-v2alpha + proto-google-cloud-bigquerymigration-v2alpha + google-cloud-bigquerymigration-bom + + + + + + org.apache.maven.plugins + 
maven-project-info-reports-plugin + 3.1.1 + + + + index + dependency-info + team + ci-management + issue-management + licenses + scm + dependency-management + distribution-management + summary + modules + + + + + true + ${site.installationModule} + jar + + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.2.0 + + + html + + aggregate + javadoc + + + + + none + protected + true + ${project.build.directory}/javadoc + + + Test helpers packages + com.google.cloud.testing + + + SPI packages + com.google.cloud.spi* + + + + + https://grpc.io/grpc-java/javadoc/ + https://developers.google.com/protocol-buffers/docs/reference/java/ + https://googleapis.dev/java/google-auth-library/latest/ + https://googleapis.dev/java/gax/latest/ + https://googleapis.github.io/api-common-java/${google.api-common.version}/apidocs/ + + + + + + diff --git a/proto-google-cloud-bigquerymigration-v2alpha/clirr-ignored-differences.xml b/proto-google-cloud-bigquerymigration-v2alpha/clirr-ignored-differences.xml new file mode 100644 index 0000000..f4a10d6 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/clirr-ignored-differences.xml @@ -0,0 +1,19 @@ + + + + + 7012 + com/google/cloud/bigquery/migration/v2alpha/*OrBuilder + * get*(*) + + + 7012 + com/google/cloud/bigquery/migration/v2alpha/*OrBuilder + boolean contains*(*) + + + 7012 + com/google/cloud/bigquery/migration/v2alpha/*OrBuilder + boolean has*(*) + + diff --git a/proto-google-cloud-bigquerymigration-v2alpha/pom.xml b/proto-google-cloud-bigquerymigration-v2alpha/pom.xml new file mode 100644 index 0000000..8b5e49c --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/pom.xml @@ -0,0 +1,46 @@ + + 4.0.0 + com.google.api.grpc + proto-google-cloud-bigquerymigration-v2alpha + 0.0.1-SNAPSHOT + proto-google-cloud-bigquerymigration-v2alpha + Proto library for google-cloud-bigquerymigration + + com.google.cloud + google-cloud-bigquerymigration-parent + 0.0.1-SNAPSHOT + + + + com.google.protobuf + protobuf-java + + + 
com.google.api.grpc + proto-google-common-protos + + + com.google.api.grpc + proto-google-iam-v1 + + + com.google.api + api-common + + + com.google.guava + guava + + + + + + + org.codehaus.mojo + flatten-maven-plugin + + + + diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/CreateMigrationWorkflowRequest.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/CreateMigrationWorkflowRequest.java new file mode 100644 index 0000000..7ff6d18 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/CreateMigrationWorkflowRequest.java @@ -0,0 +1,993 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * Request to create a migration workflow resource.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest} + */ +public final class CreateMigrationWorkflowRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest) + CreateMigrationWorkflowRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use CreateMigrationWorkflowRequest.newBuilder() to construct. + private CreateMigrationWorkflowRequest( + com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private CreateMigrationWorkflowRequest() { + parent_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new CreateMigrationWorkflowRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private CreateMigrationWorkflowRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + parent_ = s; + break; + } + case 18: + { + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder subBuilder = + null; + if (migrationWorkflow_ != null) { + subBuilder = migrationWorkflow_.toBuilder(); + } + migrationWorkflow_ = + input.readMessage( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.parser(), + extensionRegistry); + if 
(subBuilder != null) { + subBuilder.mergeFrom(migrationWorkflow_); + migrationWorkflow_ = subBuilder.buildPartial(); + } + + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_CreateMigrationWorkflowRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_CreateMigrationWorkflowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest.class, + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest.Builder + .class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + private volatile java.lang.Object parent_; + /** + * + * + *
+   * Required. The name of the project to which this migration workflow belongs.
+   * Example: `projects/foo/locations/bar`
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + @java.lang.Override + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The name of the project to which this migration workflow belongs.
+   * Example: `projects/foo/locations/bar`
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int MIGRATION_WORKFLOW_FIELD_NUMBER = 2; + private com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migrationWorkflow_; + /** + * + * + *
+   * Required. The migration workflow to create.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the migrationWorkflow field is set. + */ + @java.lang.Override + public boolean hasMigrationWorkflow() { + return migrationWorkflow_ != null; + } + /** + * + * + *
+   * Required. The migration workflow to create.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The migrationWorkflow. + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow getMigrationWorkflow() { + return migrationWorkflow_ == null + ? com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.getDefaultInstance() + : migrationWorkflow_; + } + /** + * + * + *
+   * Required. The migration workflow to create.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder + getMigrationWorkflowOrBuilder() { + return getMigrationWorkflow(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getParentBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (migrationWorkflow_ != null) { + output.writeMessage(2, getMigrationWorkflow()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getParentBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (migrationWorkflow_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getMigrationWorkflow()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj + instanceof com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest other = + (com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest) obj; + + if (!getParent().equals(other.getParent())) return false; + if (hasMigrationWorkflow() != other.hasMigrationWorkflow()) return 
false; + if (hasMigrationWorkflow()) { + if (!getMigrationWorkflow().equals(other.getMigrationWorkflow())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + if (hasMigrationWorkflow()) { + hash = (37 * hash) + MIGRATION_WORKFLOW_FIELD_NUMBER; + hash = (53 * hash) + getMigrationWorkflow().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + parseFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request to create a migration workflow resource.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest) + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_CreateMigrationWorkflowRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_CreateMigrationWorkflowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest.class, + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest.Builder + .class); + } + + // Construct using + // com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + parent_ = ""; + + if (migrationWorkflowBuilder_ == null) { + migrationWorkflow_ = null; + } else { + migrationWorkflow_ = null; + migrationWorkflowBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_CreateMigrationWorkflowRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest build() { + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest result = + new com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest(this); + result.parent_ = parent_; + if (migrationWorkflowBuilder_ == null) { + result.migrationWorkflow_ = migrationWorkflow_; + } else { + result.migrationWorkflow_ = migrationWorkflowBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int 
index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest) { + return mergeFrom( + (com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest other) { + if (other + == com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + .getDefaultInstance()) return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + onChanged(); + } + if (other.hasMigrationWorkflow()) { + mergeMigrationWorkflow(other.getMigrationWorkflow()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest parsedMessage = + null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object parent_ = ""; + /** + * + * + *
+     * Required. The name of the project to which this migration workflow belongs.
+     * Example: `projects/foo/locations/bar`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The name of the project to which this migration workflow belongs.
+     * Example: `projects/foo/locations/bar`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The name of the project to which this migration workflow belongs.
+     * Example: `projects/foo/locations/bar`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The parent to set. + * @return This builder for chaining. + */ + public Builder setParent(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + parent_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the project to which this migration workflow belongs.
+     * Example: `projects/foo/locations/bar`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearParent() { + + parent_ = getDefaultInstance().getParent(); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the project to which this migration workflow belongs.
+     * Example: `projects/foo/locations/bar`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for parent to set. + * @return This builder for chaining. + */ + public Builder setParentBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + parent_ = value; + onChanged(); + return this; + } + + private com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migrationWorkflow_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder> + migrationWorkflowBuilder_; + /** + * + * + *
+     * Required. The migration workflow to create.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the migrationWorkflow field is set. + */ + public boolean hasMigrationWorkflow() { + return migrationWorkflowBuilder_ != null || migrationWorkflow_ != null; + } + /** + * + * + *
+     * Required. The migration workflow to create.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The migrationWorkflow. + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow getMigrationWorkflow() { + if (migrationWorkflowBuilder_ == null) { + return migrationWorkflow_ == null + ? com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.getDefaultInstance() + : migrationWorkflow_; + } else { + return migrationWorkflowBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Required. The migration workflow to create.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow value) { + if (migrationWorkflowBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + migrationWorkflow_ = value; + onChanged(); + } else { + migrationWorkflowBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Required. The migration workflow to create.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder builderForValue) { + if (migrationWorkflowBuilder_ == null) { + migrationWorkflow_ = builderForValue.build(); + onChanged(); + } else { + migrationWorkflowBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Required. The migration workflow to create.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder mergeMigrationWorkflow( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow value) { + if (migrationWorkflowBuilder_ == null) { + if (migrationWorkflow_ != null) { + migrationWorkflow_ = + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.newBuilder( + migrationWorkflow_) + .mergeFrom(value) + .buildPartial(); + } else { + migrationWorkflow_ = value; + } + onChanged(); + } else { + migrationWorkflowBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Required. The migration workflow to create.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder clearMigrationWorkflow() { + if (migrationWorkflowBuilder_ == null) { + migrationWorkflow_ = null; + onChanged(); + } else { + migrationWorkflow_ = null; + migrationWorkflowBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Required. The migration workflow to create.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder + getMigrationWorkflowBuilder() { + + onChanged(); + return getMigrationWorkflowFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Required. The migration workflow to create.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder + getMigrationWorkflowOrBuilder() { + if (migrationWorkflowBuilder_ != null) { + return migrationWorkflowBuilder_.getMessageOrBuilder(); + } else { + return migrationWorkflow_ == null + ? com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.getDefaultInstance() + : migrationWorkflow_; + } + } + /** + * + * + *
+     * Required. The migration workflow to create.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder> + getMigrationWorkflowFieldBuilder() { + if (migrationWorkflowBuilder_ == null) { + migrationWorkflowBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder>( + getMigrationWorkflow(), getParentForChildren(), isClean()); + migrationWorkflow_ = null; + } + return migrationWorkflowBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest) + private static final com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new 
com.google.protobuf.AbstractParser() { + @java.lang.Override + public CreateMigrationWorkflowRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CreateMigrationWorkflowRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/CreateMigrationWorkflowRequestOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/CreateMigrationWorkflowRequestOrBuilder.java new file mode 100644 index 0000000..fff1ec9 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/CreateMigrationWorkflowRequestOrBuilder.java @@ -0,0 +1,98 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface CreateMigrationWorkflowRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The name of the project to which this migration workflow belongs.
+   * Example: `projects/foo/locations/bar`
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + java.lang.String getParent(); + /** + * + * + *
+   * Required. The name of the project to which this migration workflow belongs.
+   * Example: `projects/foo/locations/bar`
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + com.google.protobuf.ByteString getParentBytes(); + + /** + * + * + *
+   * Required. The migration workflow to create.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the migrationWorkflow field is set. + */ + boolean hasMigrationWorkflow(); + /** + * + * + *
+   * Required. The migration workflow to create.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The migrationWorkflow. + */ + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow getMigrationWorkflow(); + /** + * + * + *
+   * Required. The migration workflow to create.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflow = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder + getMigrationWorkflowOrBuilder(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/DeleteMigrationWorkflowRequest.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/DeleteMigrationWorkflowRequest.java new file mode 100644 index 0000000..7fda3f0 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/DeleteMigrationWorkflowRequest.java @@ -0,0 +1,683 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * A request to delete a previously created migration workflow.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest} + */ +public final class DeleteMigrationWorkflowRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest) + DeleteMigrationWorkflowRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use DeleteMigrationWorkflowRequest.newBuilder() to construct. + private DeleteMigrationWorkflowRequest( + com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private DeleteMigrationWorkflowRequest() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new DeleteMigrationWorkflowRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private DeleteMigrationWorkflowRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_DeleteMigrationWorkflowRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_DeleteMigrationWorkflowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest.class, + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest.Builder + .class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + * + * + *
+   * Required. The unique identifier for the migration workflow.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The unique identifier for the migration workflow.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj + instanceof com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest other = + (com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest) obj; + + if (!getName().equals(other.getName())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int 
hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static 
com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + 
public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * A request to delete a previously created migration workflow.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest) + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_DeleteMigrationWorkflowRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_DeleteMigrationWorkflowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest.class, + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest.Builder + .class); + } + + // Construct using + // com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + 
.internal_static_google_cloud_bigquery_migration_v2alpha_DeleteMigrationWorkflowRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest build() { + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest result = + new com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest(this); + result.name_ = name_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } 
+ + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest) { + return mergeFrom( + (com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest other) { + if (other + == com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + .getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest parsedMessage = + null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest) + private static final com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DeleteMigrationWorkflowRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeleteMigrationWorkflowRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser 
getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/DeleteMigrationWorkflowRequestOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/DeleteMigrationWorkflowRequestOrBuilder.java new file mode 100644 index 0000000..2e7153d --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/DeleteMigrationWorkflowRequestOrBuilder.java @@ -0,0 +1,56 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface DeleteMigrationWorkflowRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The unique identifier for the migration workflow.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Required. The unique identifier for the migration workflow.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ErrorDetail.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ErrorDetail.java new file mode 100644 index 0000000..f6ad60a --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ErrorDetail.java @@ -0,0 +1,1030 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_error_details.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * Provides details for errors, e.g. issues that where encountered when
+ * processing a subtask.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ErrorDetail} + */ +public final class ErrorDetail extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.ErrorDetail) + ErrorDetailOrBuilder { + private static final long serialVersionUID = 0L; + // Use ErrorDetail.newBuilder() to construct. + private ErrorDetail(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ErrorDetail() {} + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ErrorDetail(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private ErrorDetail( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.Builder subBuilder = null; + if (location_ != null) { + subBuilder = location_.toBuilder(); + } + location_ = + input.readMessage( + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(location_); + location_ = subBuilder.buildPartial(); + } + + break; + } + case 18: + { + com.google.rpc.ErrorInfo.Builder subBuilder = null; + if (errorInfo_ != null) { + subBuilder = errorInfo_.toBuilder(); + } + errorInfo_ = input.readMessage(com.google.rpc.ErrorInfo.parser(), 
extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(errorInfo_); + errorInfo_ = subBuilder.buildPartial(); + } + + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ErrorDetail_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ErrorDetail_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.class, + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.Builder.class); + } + + public static final int LOCATION_FIELD_NUMBER = 1; + private com.google.cloud.bigquery.migration.v2alpha.ErrorLocation location_; + /** + * + * + *
+   * Optional. The exact location within the resource (if applicable).
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the location field is set. + */ + @java.lang.Override + public boolean hasLocation() { + return location_ != null; + } + /** + * + * + *
+   * Optional. The exact location within the resource (if applicable).
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The location. + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ErrorLocation getLocation() { + return location_ == null + ? com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.getDefaultInstance() + : location_; + } + /** + * + * + *
+   * Optional. The exact location within the resource (if applicable).
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ErrorLocationOrBuilder getLocationOrBuilder() { + return getLocation(); + } + + public static final int ERROR_INFO_FIELD_NUMBER = 2; + private com.google.rpc.ErrorInfo errorInfo_; + /** + * + * + *
+   * Required. Describes the cause of the error with structured detail.
+   * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * @return Whether the errorInfo field is set. + */ + @java.lang.Override + public boolean hasErrorInfo() { + return errorInfo_ != null; + } + /** + * + * + *
+   * Required. Describes the cause of the error with structured detail.
+   * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The errorInfo. + */ + @java.lang.Override + public com.google.rpc.ErrorInfo getErrorInfo() { + return errorInfo_ == null ? com.google.rpc.ErrorInfo.getDefaultInstance() : errorInfo_; + } + /** + * + * + *
+   * Required. Describes the cause of the error with structured detail.
+   * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + */ + @java.lang.Override + public com.google.rpc.ErrorInfoOrBuilder getErrorInfoOrBuilder() { + return getErrorInfo(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (location_ != null) { + output.writeMessage(1, getLocation()); + } + if (errorInfo_ != null) { + output.writeMessage(2, getErrorInfo()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (location_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getLocation()); + } + if (errorInfo_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getErrorInfo()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.ErrorDetail)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail other = + (com.google.cloud.bigquery.migration.v2alpha.ErrorDetail) obj; + + if (hasLocation() != other.hasLocation()) return false; + if (hasLocation()) { + if (!getLocation().equals(other.getLocation())) return false; + } + if (hasErrorInfo() != other.hasErrorInfo()) return false; + if (hasErrorInfo()) { + if (!getErrorInfo().equals(other.getErrorInfo())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return 
true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasLocation()) { + hash = (37 * hash) + LOCATION_FIELD_NUMBER; + hash = (53 * hash) + getLocation().hashCode(); + } + if (hasErrorInfo()) { + hash = (37 * hash) + ERROR_INFO_FIELD_NUMBER; + hash = (53 * hash) + getErrorInfo().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + 
com.google.cloud.bigquery.migration.v2alpha.ErrorDetail prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Provides details for errors, e.g. issues that where encountered when
+   * processing a subtask.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ErrorDetail} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.ErrorDetail) + com.google.cloud.bigquery.migration.v2alpha.ErrorDetailOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ErrorDetail_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ErrorDetail_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.class, + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.Builder.class); + } + + // Construct using com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + if (locationBuilder_ == null) { + location_ = null; + } else { + location_ = null; + locationBuilder_ = null; + } + if (errorInfoBuilder_ == null) { + errorInfo_ = null; + } else { + errorInfo_ = null; + errorInfoBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return 
com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ErrorDetail_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ErrorDetail getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ErrorDetail build() { + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ErrorDetail buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail result = + new com.google.cloud.bigquery.migration.v2alpha.ErrorDetail(this); + if (locationBuilder_ == null) { + result.location_ = location_; + } else { + result.location_ = locationBuilder_.build(); + } + if (errorInfoBuilder_ == null) { + result.errorInfo_ = errorInfo_; + } else { + result.errorInfo_ = errorInfoBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder 
addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.migration.v2alpha.ErrorDetail) { + return mergeFrom((com.google.cloud.bigquery.migration.v2alpha.ErrorDetail) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.migration.v2alpha.ErrorDetail other) { + if (other == com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.getDefaultInstance()) + return this; + if (other.hasLocation()) { + mergeLocation(other.getLocation()); + } + if (other.hasErrorInfo()) { + mergeErrorInfo(other.getErrorInfo()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.ErrorDetail) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private com.google.cloud.bigquery.migration.v2alpha.ErrorLocation location_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation, + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.Builder, + com.google.cloud.bigquery.migration.v2alpha.ErrorLocationOrBuilder> + locationBuilder_; + /** + * + * + 
*
+     * Optional. The exact location within the resource (if applicable).
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the location field is set. + */ + public boolean hasLocation() { + return locationBuilder_ != null || location_ != null; + } + /** + * + * + *
+     * Optional. The exact location within the resource (if applicable).
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The location. + */ + public com.google.cloud.bigquery.migration.v2alpha.ErrorLocation getLocation() { + if (locationBuilder_ == null) { + return location_ == null + ? com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.getDefaultInstance() + : location_; + } else { + return locationBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Optional. The exact location within the resource (if applicable).
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder setLocation(com.google.cloud.bigquery.migration.v2alpha.ErrorLocation value) { + if (locationBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + location_ = value; + onChanged(); + } else { + locationBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Optional. The exact location within the resource (if applicable).
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder setLocation( + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.Builder builderForValue) { + if (locationBuilder_ == null) { + location_ = builderForValue.build(); + onChanged(); + } else { + locationBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Optional. The exact location within the resource (if applicable).
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder mergeLocation(com.google.cloud.bigquery.migration.v2alpha.ErrorLocation value) { + if (locationBuilder_ == null) { + if (location_ != null) { + location_ = + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.newBuilder(location_) + .mergeFrom(value) + .buildPartial(); + } else { + location_ = value; + } + onChanged(); + } else { + locationBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Optional. The exact location within the resource (if applicable).
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder clearLocation() { + if (locationBuilder_ == null) { + location_ = null; + onChanged(); + } else { + location_ = null; + locationBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Optional. The exact location within the resource (if applicable).
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.Builder getLocationBuilder() { + + onChanged(); + return getLocationFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Optional. The exact location within the resource (if applicable).
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.ErrorLocationOrBuilder + getLocationOrBuilder() { + if (locationBuilder_ != null) { + return locationBuilder_.getMessageOrBuilder(); + } else { + return location_ == null + ? com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.getDefaultInstance() + : location_; + } + } + /** + * + * + *
+     * Optional. The exact location within the resource (if applicable).
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation, + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.Builder, + com.google.cloud.bigquery.migration.v2alpha.ErrorLocationOrBuilder> + getLocationFieldBuilder() { + if (locationBuilder_ == null) { + locationBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation, + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.Builder, + com.google.cloud.bigquery.migration.v2alpha.ErrorLocationOrBuilder>( + getLocation(), getParentForChildren(), isClean()); + location_ = null; + } + return locationBuilder_; + } + + private com.google.rpc.ErrorInfo errorInfo_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.ErrorInfo, + com.google.rpc.ErrorInfo.Builder, + com.google.rpc.ErrorInfoOrBuilder> + errorInfoBuilder_; + /** + * + * + *
+     * Required. Describes the cause of the error with structured detail.
+     * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * @return Whether the errorInfo field is set. + */ + public boolean hasErrorInfo() { + return errorInfoBuilder_ != null || errorInfo_ != null; + } + /** + * + * + *
+     * Required. Describes the cause of the error with structured detail.
+     * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The errorInfo. + */ + public com.google.rpc.ErrorInfo getErrorInfo() { + if (errorInfoBuilder_ == null) { + return errorInfo_ == null ? com.google.rpc.ErrorInfo.getDefaultInstance() : errorInfo_; + } else { + return errorInfoBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Required. Describes the cause of the error with structured detail.
+     * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + */ + public Builder setErrorInfo(com.google.rpc.ErrorInfo value) { + if (errorInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + errorInfo_ = value; + onChanged(); + } else { + errorInfoBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Required. Describes the cause of the error with structured detail.
+     * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + */ + public Builder setErrorInfo(com.google.rpc.ErrorInfo.Builder builderForValue) { + if (errorInfoBuilder_ == null) { + errorInfo_ = builderForValue.build(); + onChanged(); + } else { + errorInfoBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Required. Describes the cause of the error with structured detail.
+     * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + */ + public Builder mergeErrorInfo(com.google.rpc.ErrorInfo value) { + if (errorInfoBuilder_ == null) { + if (errorInfo_ != null) { + errorInfo_ = + com.google.rpc.ErrorInfo.newBuilder(errorInfo_).mergeFrom(value).buildPartial(); + } else { + errorInfo_ = value; + } + onChanged(); + } else { + errorInfoBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Required. Describes the cause of the error with structured detail.
+     * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + */ + public Builder clearErrorInfo() { + if (errorInfoBuilder_ == null) { + errorInfo_ = null; + onChanged(); + } else { + errorInfo_ = null; + errorInfoBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Required. Describes the cause of the error with structured detail.
+     * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + */ + public com.google.rpc.ErrorInfo.Builder getErrorInfoBuilder() { + + onChanged(); + return getErrorInfoFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Required. Describes the cause of the error with structured detail.
+     * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + */ + public com.google.rpc.ErrorInfoOrBuilder getErrorInfoOrBuilder() { + if (errorInfoBuilder_ != null) { + return errorInfoBuilder_.getMessageOrBuilder(); + } else { + return errorInfo_ == null ? com.google.rpc.ErrorInfo.getDefaultInstance() : errorInfo_; + } + } + /** + * + * + *
+     * Required. Describes the cause of the error with structured detail.
+     * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.ErrorInfo, + com.google.rpc.ErrorInfo.Builder, + com.google.rpc.ErrorInfoOrBuilder> + getErrorInfoFieldBuilder() { + if (errorInfoBuilder_ == null) { + errorInfoBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.ErrorInfo, + com.google.rpc.ErrorInfo.Builder, + com.google.rpc.ErrorInfoOrBuilder>( + getErrorInfo(), getParentForChildren(), isClean()); + errorInfo_ = null; + } + return errorInfoBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.ErrorDetail) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.ErrorDetail) + private static final com.google.cloud.bigquery.migration.v2alpha.ErrorDetail DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.ErrorDetail(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorDetail getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ErrorDetail parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ErrorDetail(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ErrorDetail getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ErrorDetailOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ErrorDetailOrBuilder.java new file mode 100644 index 0000000..1173012 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ErrorDetailOrBuilder.java @@ -0,0 +1,101 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_error_details.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface ErrorDetailOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.ErrorDetail) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Optional. The exact location within the resource (if applicable).
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the location field is set. + */ + boolean hasLocation(); + /** + * + * + *
+   * Optional. The exact location within the resource (if applicable).
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The location. + */ + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation getLocation(); + /** + * + * + *
+   * Optional. The exact location within the resource (if applicable).
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.ErrorLocation location = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + com.google.cloud.bigquery.migration.v2alpha.ErrorLocationOrBuilder getLocationOrBuilder(); + + /** + * + * + *
+   * Required. Describes the cause of the error with structured detail.
+   * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * @return Whether the errorInfo field is set. + */ + boolean hasErrorInfo(); + /** + * + * + *
+   * Required. Describes the cause of the error with structured detail.
+   * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The errorInfo. + */ + com.google.rpc.ErrorInfo getErrorInfo(); + /** + * + * + *
+   * Required. Describes the cause of the error with structured detail.
+   * 
+ * + * .google.rpc.ErrorInfo error_info = 2 [(.google.api.field_behavior) = REQUIRED]; + */ + com.google.rpc.ErrorInfoOrBuilder getErrorInfoOrBuilder(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ErrorLocation.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ErrorLocation.java new file mode 100644 index 0000000..6b42ec6 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ErrorLocation.java @@ -0,0 +1,645 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_error_details.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * Holds information about where the error is located.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ErrorLocation} + */ +public final class ErrorLocation extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.ErrorLocation) + ErrorLocationOrBuilder { + private static final long serialVersionUID = 0L; + // Use ErrorLocation.newBuilder() to construct. + private ErrorLocation(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ErrorLocation() {} + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ErrorLocation(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private ErrorLocation( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: + { + line_ = input.readInt32(); + break; + } + case 16: + { + column_ = input.readInt32(); + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ErrorLocation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ErrorLocation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.class, + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.Builder.class); + } + + public static final int LINE_FIELD_NUMBER = 1; + private int line_; + /** + * + * + *
+   * Optional. If applicable, denotes the line where the error occurred. A zero value
+   * means that there is no line information.
+   * 
+ * + * int32 line = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The line. + */ + @java.lang.Override + public int getLine() { + return line_; + } + + public static final int COLUMN_FIELD_NUMBER = 2; + private int column_; + /** + * + * + *
+   * Optional. If applicable, denotes the column where the error occurred. A zero value
+   * means that there is no column information.
+   * 
+ * + * int32 column = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The column. + */ + @java.lang.Override + public int getColumn() { + return column_; + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (line_ != 0) { + output.writeInt32(1, line_); + } + if (column_ != 0) { + output.writeInt32(2, column_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (line_ != 0) { + size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, line_); + } + if (column_ != 0) { + size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, column_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.ErrorLocation)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation other = + (com.google.cloud.bigquery.migration.v2alpha.ErrorLocation) obj; + + if (getLine() != other.getLine()) return false; + if (getColumn() != other.getColumn()) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + LINE_FIELD_NUMBER; + hash = (53 * hash) + getLine(); + hash = (37 * hash) + 
COLUMN_FIELD_NUMBER; + hash = (53 * hash) + getColumn(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parseFrom( + 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Holds information about where the error is located.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ErrorLocation} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.ErrorLocation) + com.google.cloud.bigquery.migration.v2alpha.ErrorLocationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ErrorLocation_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ErrorLocation_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.class, + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.Builder.class); + } + + // Construct using com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + line_ = 0; + + column_ = 0; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ErrorLocation_descriptor; + } + + @java.lang.Override + public 
com.google.cloud.bigquery.migration.v2alpha.ErrorLocation getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ErrorLocation build() { + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ErrorLocation buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation result = + new com.google.cloud.bigquery.migration.v2alpha.ErrorLocation(this); + result.line_ = line_; + result.column_ = column_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.migration.v2alpha.ErrorLocation) { + return 
mergeFrom((com.google.cloud.bigquery.migration.v2alpha.ErrorLocation) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.migration.v2alpha.ErrorLocation other) { + if (other == com.google.cloud.bigquery.migration.v2alpha.ErrorLocation.getDefaultInstance()) + return this; + if (other.getLine() != 0) { + setLine(other.getLine()); + } + if (other.getColumn() != 0) { + setColumn(other.getColumn()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.ErrorLocation parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.ErrorLocation) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int line_; + /** + * + * + *
+     * Optional. If applicable, denotes the line where the error occurred. A zero value
+     * means that there is no line information.
+     * 
+ * + * int32 line = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The line. + */ + @java.lang.Override + public int getLine() { + return line_; + } + /** + * + * + *
+     * Optional. If applicable, denotes the line where the error occurred. A zero value
+     * means that there is no line information.
+     * 
+ * + * int32 line = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * @param value The line to set. + * @return This builder for chaining. + */ + public Builder setLine(int value) { + + line_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Optional. If applicable, denotes the line where the error occurred. A zero value
+     * means that there is no line information.
+     * 
+ * + * int32 line = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return This builder for chaining. + */ + public Builder clearLine() { + + line_ = 0; + onChanged(); + return this; + } + + private int column_; + /** + * + * + *
+     * Optional. If applicable, denotes the column where the error occurred. A zero value
+     * means that there is no column information.
+     * 
+ * + * int32 column = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The column. + */ + @java.lang.Override + public int getColumn() { + return column_; + } + /** + * + * + *
+     * Optional. If applicable, denotes the column where the error occurred. A zero value
+     * means that there is no column information.
+     * 
+ * + * int32 column = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * @param value The column to set. + * @return This builder for chaining. + */ + public Builder setColumn(int value) { + + column_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Optional. If applicable, denotes the column where the error occurred. A zero value
+     * means that there is no columns information.
+     * 
+ * + * int32 column = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return This builder for chaining. + */ + public Builder clearColumn() { + + column_ = 0; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.ErrorLocation) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.ErrorLocation) + private static final com.google.cloud.bigquery.migration.v2alpha.ErrorLocation DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.ErrorLocation(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ErrorLocation getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ErrorLocation parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ErrorLocation(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ErrorLocation getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ErrorLocationOrBuilder.java 
b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ErrorLocationOrBuilder.java new file mode 100644 index 0000000..ed2cd6a --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ErrorLocationOrBuilder.java @@ -0,0 +1,53 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_error_details.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface ErrorLocationOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.ErrorLocation) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Optional. If applicable, denotes the line where the error occurred. A zero value
+   * means that there is no line information.
+   * 
+ * + * int32 line = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The line. + */ + int getLine(); + + /** + * + * + *
+   * Optional. If applicable, denotes the column where the error occurred. A zero value
+   * means that there is no columns information.
+   * 
+ * + * int32 column = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The column. + */ + int getColumn(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/GetMigrationSubtaskRequest.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/GetMigrationSubtaskRequest.java new file mode 100644 index 0000000..1a407ca --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/GetMigrationSubtaskRequest.java @@ -0,0 +1,950 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * A request to get a previously created migration subtasks.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest} + */ +public final class GetMigrationSubtaskRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest) + GetMigrationSubtaskRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use GetMigrationSubtaskRequest.newBuilder() to construct. + private GetMigrationSubtaskRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private GetMigrationSubtaskRequest() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new GetMigrationSubtaskRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private GetMigrationSubtaskRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: + { + com.google.protobuf.FieldMask.Builder subBuilder = null; + if (readMask_ != null) { + subBuilder = readMask_.toBuilder(); + } + readMask_ = + input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(readMask_); + readMask_ = subBuilder.buildPartial(); + } + + break; + } + default: + { + if 
(!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_GetMigrationSubtaskRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_GetMigrationSubtaskRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest.class, + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + * + * + *
+   * Required. The unique identifier for the migration subtask.
+   * Example: `projects/123/locations/us/workflows/1234/subtasks/543`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The unique identifier for the migration subtask.
+   * Example: `projects/123/locations/us/workflows/1234/subtasks/543`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int READ_MASK_FIELD_NUMBER = 2; + private com.google.protobuf.FieldMask readMask_; + /** + * + * + *
+   * Optional. The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the readMask field is set. + */ + @java.lang.Override + public boolean hasReadMask() { + return readMask_ != null; + } + /** + * + * + *
+   * Optional. The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The readMask. + */ + @java.lang.Override + public com.google.protobuf.FieldMask getReadMask() { + return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; + } + /** + * + * + *
+   * Optional. The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + @java.lang.Override + public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() { + return getReadMask(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (readMask_ != null) { + output.writeMessage(2, getReadMask()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (readMask_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getReadMask()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest other = + (com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest) obj; + + if (!getName().equals(other.getName())) return false; + if (hasReadMask() != other.hasReadMask()) return false; + if (hasReadMask()) { + if (!getReadMask().equals(other.getReadMask())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + 
} + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (hasReadMask()) { + hash = (37 * hash) + READ_MASK_FIELD_NUMBER; + hash = (53 * hash) + getReadMask().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return 
newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * A request to get a previously created migration subtasks.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest) + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_GetMigrationSubtaskRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_GetMigrationSubtaskRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest.class, + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest.Builder.class); + } + + // Construct using + // com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + if (readMaskBuilder_ == null) { + readMask_ = null; + } else { + readMask_ = null; + readMaskBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return 
com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_GetMigrationSubtaskRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest build() { + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest result = + new com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest(this); + result.name_ = name_; + if (readMaskBuilder_ == null) { + result.readMask_ = readMask_; + } else { + result.readMask_ = readMaskBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + 
public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest) { + return mergeFrom( + (com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest other) { + if (other + == com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest + .getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (other.hasReadMask()) { + mergeReadMask(other.getReadMask()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Required. The unique identifier for the migration subtask.
+     * Example: `projects/123/locations/us/workflows/1234/subtasks/543`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The unique identifier for the migration subtask.
+     * Example: `projects/123/locations/us/workflows/1234/subtasks/543`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The unique identifier for the migration subtask.
+     * Example: `projects/123/locations/us/workflows/1234/subtasks/543`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The unique identifier for the migration subtask.
+     * Example: `projects/123/locations/us/workflows/1234/subtasks/543`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The unique identifier for the migration subtask.
+     * Example: `projects/123/locations/us/workflows/1234/subtasks/543`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.FieldMask readMask_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder> + readMaskBuilder_; + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the readMask field is set. + */ + public boolean hasReadMask() { + return readMaskBuilder_ != null || readMask_ != null; + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The readMask. + */ + public com.google.protobuf.FieldMask getReadMask() { + if (readMaskBuilder_ == null) { + return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; + } else { + return readMaskBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder setReadMask(com.google.protobuf.FieldMask value) { + if (readMaskBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + readMask_ = value; + onChanged(); + } else { + readMaskBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder setReadMask(com.google.protobuf.FieldMask.Builder builderForValue) { + if (readMaskBuilder_ == null) { + readMask_ = builderForValue.build(); + onChanged(); + } else { + readMaskBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder mergeReadMask(com.google.protobuf.FieldMask value) { + if (readMaskBuilder_ == null) { + if (readMask_ != null) { + readMask_ = + com.google.protobuf.FieldMask.newBuilder(readMask_).mergeFrom(value).buildPartial(); + } else { + readMask_ = value; + } + onChanged(); + } else { + readMaskBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder clearReadMask() { + if (readMaskBuilder_ == null) { + readMask_ = null; + onChanged(); + } else { + readMask_ = null; + readMaskBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public com.google.protobuf.FieldMask.Builder getReadMaskBuilder() { + + onChanged(); + return getReadMaskFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() { + if (readMaskBuilder_ != null) { + return readMaskBuilder_.getMessageOrBuilder(); + } else { + return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; + } + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder> + getReadMaskFieldBuilder() { + if (readMaskBuilder_ == null) { + readMaskBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder>( + getReadMask(), getParentForChildren(), isClean()); + readMask_ = null; + } + return readMaskBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest) + private static final com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetMigrationSubtaskRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetMigrationSubtaskRequest(input, 
extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/GetMigrationSubtaskRequestOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/GetMigrationSubtaskRequestOrBuilder.java new file mode 100644 index 0000000..0d21a98 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/GetMigrationSubtaskRequestOrBuilder.java @@ -0,0 +1,94 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface GetMigrationSubtaskRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The unique identifier for the migration subtask.
+   * Example: `projects/123/locations/us/workflows/1234/subtasks/543`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Required. The unique identifier for the migration subtask.
+   * Example: `projects/123/locations/us/workflows/1234/subtasks/543`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); + + /** + * + * + *
+   * Optional. The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the readMask field is set. + */ + boolean hasReadMask(); + /** + * + * + *
+   * Optional. The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The readMask. + */ + com.google.protobuf.FieldMask getReadMask(); + /** + * + * + *
+   * Optional. The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/GetMigrationWorkflowRequest.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/GetMigrationWorkflowRequest.java new file mode 100644 index 0000000..e7993f9 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/GetMigrationWorkflowRequest.java @@ -0,0 +1,941 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * A request to get a previously created migration workflow.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest} + */ +public final class GetMigrationWorkflowRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest) + GetMigrationWorkflowRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use GetMigrationWorkflowRequest.newBuilder() to construct. + private GetMigrationWorkflowRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private GetMigrationWorkflowRequest() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new GetMigrationWorkflowRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private GetMigrationWorkflowRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: + { + com.google.protobuf.FieldMask.Builder subBuilder = null; + if (readMask_ != null) { + subBuilder = readMask_.toBuilder(); + } + readMask_ = + input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(readMask_); + readMask_ = subBuilder.buildPartial(); + } + + break; + } + default: + { + if 
(!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_GetMigrationWorkflowRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_GetMigrationWorkflowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest.class, + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + * + * + *
+   * Required. The unique identifier for the migration workflow.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The unique identifier for the migration workflow.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int READ_MASK_FIELD_NUMBER = 2; + private com.google.protobuf.FieldMask readMask_; + /** + * + * + *
+   * The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + * + * @return Whether the readMask field is set. + */ + @java.lang.Override + public boolean hasReadMask() { + return readMask_ != null; + } + /** + * + * + *
+   * The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + * + * @return The readMask. + */ + @java.lang.Override + public com.google.protobuf.FieldMask getReadMask() { + return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; + } + /** + * + * + *
+   * The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + @java.lang.Override + public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() { + return getReadMask(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (readMask_ != null) { + output.writeMessage(2, getReadMask()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (readMask_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getReadMask()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest other = + (com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest) obj; + + if (!getName().equals(other.getName())) return false; + if (hasReadMask() != other.hasReadMask()) return false; + if (hasReadMask()) { + if (!getReadMask().equals(other.getReadMask())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int 
hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (hasReadMask()) { + hash = (37 * hash) + READ_MASK_FIELD_NUMBER; + hash = (53 * hash) + getReadMask().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() 
{ + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * A request to get a previously created migration workflow.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest) + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_GetMigrationWorkflowRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_GetMigrationWorkflowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest.class, + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest.Builder + .class); + } + + // Construct using + // com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + if (readMaskBuilder_ == null) { + readMask_ = null; + } else { + readMask_ = null; + readMaskBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return 
com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_GetMigrationWorkflowRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest build() { + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest result = + new com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest(this); + result.name_ = name_; + if (readMaskBuilder_ == null) { + result.readMask_ = readMask_; + } else { + result.readMask_ = readMaskBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + 
@java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest) { + return mergeFrom( + (com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest other) { + if (other + == com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest + .getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (other.hasReadMask()) { + mergeReadMask(other.getReadMask()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.FieldMask readMask_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder> + readMaskBuilder_; + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + * + * @return Whether the readMask field is set. + */ + public boolean hasReadMask() { + return readMaskBuilder_ != null || readMask_ != null; + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + * + * @return The readMask. + */ + public com.google.protobuf.FieldMask getReadMask() { + if (readMaskBuilder_ == null) { + return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; + } else { + return readMaskBuilder_.getMessage(); + } + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + public Builder setReadMask(com.google.protobuf.FieldMask value) { + if (readMaskBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + readMask_ = value; + onChanged(); + } else { + readMaskBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + public Builder setReadMask(com.google.protobuf.FieldMask.Builder builderForValue) { + if (readMaskBuilder_ == null) { + readMask_ = builderForValue.build(); + onChanged(); + } else { + readMaskBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + public Builder mergeReadMask(com.google.protobuf.FieldMask value) { + if (readMaskBuilder_ == null) { + if (readMask_ != null) { + readMask_ = + com.google.protobuf.FieldMask.newBuilder(readMask_).mergeFrom(value).buildPartial(); + } else { + readMask_ = value; + } + onChanged(); + } else { + readMaskBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + public Builder clearReadMask() { + if (readMaskBuilder_ == null) { + readMask_ = null; + onChanged(); + } else { + readMask_ = null; + readMaskBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + public com.google.protobuf.FieldMask.Builder getReadMaskBuilder() { + + onChanged(); + return getReadMaskFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() { + if (readMaskBuilder_ != null) { + return readMaskBuilder_.getMessageOrBuilder(); + } else { + return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; + } + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder> + getReadMaskFieldBuilder() { + if (readMaskBuilder_ == null) { + readMaskBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder>( + getReadMask(), getParentForChildren(), isClean()); + readMask_ = null; + } + return readMaskBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest) + private static final com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetMigrationWorkflowRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetMigrationWorkflowRequest(input, extensionRegistry); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/GetMigrationWorkflowRequestOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/GetMigrationWorkflowRequestOrBuilder.java new file mode 100644 index 0000000..d512a7d --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/GetMigrationWorkflowRequestOrBuilder.java @@ -0,0 +1,91 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface GetMigrationWorkflowRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The unique identifier for the migration workflow.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Required. The unique identifier for the migration workflow.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); + + /** + * + * + *
+   * The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + * + * @return Whether the readMask field is set. + */ + boolean hasReadMask(); + /** + * + * + *
+   * The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + * + * @return The readMask. + */ + com.google.protobuf.FieldMask getReadMask(); + /** + * + * + *
+   * The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationSubtasksRequest.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationSubtasksRequest.java new file mode 100644 index 0000000..1e81f52 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationSubtasksRequest.java @@ -0,0 +1,1441 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * A request to list previously created migration subtasks.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest} + */ +public final class ListMigrationSubtasksRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest) + ListMigrationSubtasksRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ListMigrationSubtasksRequest.newBuilder() to construct. + private ListMigrationSubtasksRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ListMigrationSubtasksRequest() { + parent_ = ""; + pageToken_ = ""; + filter_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ListMigrationSubtasksRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private ListMigrationSubtasksRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + parent_ = s; + break; + } + case 18: + { + com.google.protobuf.FieldMask.Builder subBuilder = null; + if (readMask_ != null) { + subBuilder = readMask_.toBuilder(); + } + readMask_ = + input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(readMask_); + readMask_ = 
subBuilder.buildPartial(); + } + + break; + } + case 24: + { + pageSize_ = input.readInt32(); + break; + } + case 34: + { + java.lang.String s = input.readStringRequireUtf8(); + + pageToken_ = s; + break; + } + case 42: + { + java.lang.String s = input.readStringRequireUtf8(); + + filter_ = s; + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationSubtasksRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationSubtasksRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest.class, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest.Builder.class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + private volatile java.lang.Object parent_; + /** + * + * + *
+   * Required. The migration task of the subtasks to list.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + @java.lang.Override + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The migration task of the subtasks to list.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int READ_MASK_FIELD_NUMBER = 2; + private com.google.protobuf.FieldMask readMask_; + /** + * + * + *
+   * Optional. The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the readMask field is set. + */ + @java.lang.Override + public boolean hasReadMask() { + return readMask_ != null; + } + /** + * + * + *
+   * Optional. The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The readMask. + */ + @java.lang.Override + public com.google.protobuf.FieldMask getReadMask() { + return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; + } + /** + * + * + *
+   * Optional. The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + @java.lang.Override + public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() { + return getReadMask(); + } + + public static final int PAGE_SIZE_FIELD_NUMBER = 3; + private int pageSize_; + /** + * + * + *
+   * Optional. The maximum number of migration tasks to return. The service may return
+   * fewer than this number.
+   * 
+ * + * int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The pageSize. + */ + @java.lang.Override + public int getPageSize() { + return pageSize_; + } + + public static final int PAGE_TOKEN_FIELD_NUMBER = 4; + private volatile java.lang.Object pageToken_; + /** + * + * + *
+   * Optional. A page token, received from previous `ListMigrationSubtasks` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListMigrationSubtasks`
+   * must match the call that provided the page token.
+   * 
+ * + * string page_token = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The pageToken. + */ + @java.lang.Override + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } + } + /** + * + * + *
+   * Optional. A page token, received from previous `ListMigrationSubtasks` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListMigrationSubtasks`
+   * must match the call that provided the page token.
+   * 
+ * + * string page_token = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The bytes for pageToken. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int FILTER_FIELD_NUMBER = 5; + private volatile java.lang.Object filter_; + /** + * + * + *
+   * Optional. The filter to apply. This can be used to get the subtasks of a specific
+   * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+   * task ID (not the name in the named map).
+   * 
+ * + * string filter = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The filter. + */ + @java.lang.Override + public java.lang.String getFilter() { + java.lang.Object ref = filter_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + filter_ = s; + return s; + } + } + /** + * + * + *
+   * Optional. The filter to apply. This can be used to get the subtasks of a specific
+   * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+   * task ID (not the name in the named map).
+   * 
+ * + * string filter = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The bytes for filter. + */ + @java.lang.Override + public com.google.protobuf.ByteString getFilterBytes() { + java.lang.Object ref = filter_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + filter_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getParentBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (readMask_ != null) { + output.writeMessage(2, getReadMask()); + } + if (pageSize_ != 0) { + output.writeInt32(3, pageSize_); + } + if (!getPageTokenBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_); + } + if (!getFilterBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, filter_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getParentBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (readMask_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getReadMask()); + } + if (pageSize_ != 0) { + size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_); + } + if (!getPageTokenBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_); + 
} + if (!getFilterBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, filter_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj + instanceof com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest other = + (com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest) obj; + + if (!getParent().equals(other.getParent())) return false; + if (hasReadMask() != other.hasReadMask()) return false; + if (hasReadMask()) { + if (!getReadMask().equals(other.getReadMask())) return false; + } + if (getPageSize() != other.getPageSize()) return false; + if (!getPageToken().equals(other.getPageToken())) return false; + if (!getFilter().equals(other.getFilter())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + if (hasReadMask()) { + hash = (37 * hash) + READ_MASK_FIELD_NUMBER; + hash = (53 * hash) + getReadMask().hashCode(); + } + hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; + hash = (53 * hash) + getPageSize(); + hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getPageToken().hashCode(); + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest parseFrom( + java.nio.ByteBuffer data) 
throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * A request to list previously created migration subtasks.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest) + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationSubtasksRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationSubtasksRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest.class, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest.Builder + .class); + } + + // Construct using + // com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + parent_ = ""; + + if (readMaskBuilder_ == null) { + readMask_ = null; + } else { + readMask_ = null; + readMaskBuilder_ = null; + } + pageSize_ = 0; + + pageToken_ = ""; + + filter_ = ""; + + return this; + } + + @java.lang.Override + public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationSubtasksRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest build() { + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest result = + new com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest(this); + result.parent_ = parent_; + if (readMaskBuilder_ == null) { + result.readMask_ = readMask_; + } else { + result.readMask_ = readMaskBuilder_.build(); + } + result.pageSize_ = pageSize_; + result.pageToken_ = pageToken_; + result.filter_ = filter_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder 
setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest) { + return mergeFrom( + (com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest other) { + if (other + == com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest + .getDefaultInstance()) return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + onChanged(); + } + if (other.hasReadMask()) { + mergeReadMask(other.getReadMask()); + } + if (other.getPageSize() != 0) { + setPageSize(other.getPageSize()); + } + if (!other.getPageToken().isEmpty()) { + pageToken_ = other.pageToken_; + onChanged(); + } + if (!other.getFilter().isEmpty()) { + filter_ = other.filter_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + 
(com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object parent_ = ""; + /** + * + * + *
+     * Required. The migration task of the subtasks to list.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The migration task of the subtasks to list.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The migration task of the subtasks to list.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The parent to set. + * @return This builder for chaining. + */ + public Builder setParent(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + parent_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The migration task of the subtasks to list.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearParent() { + + parent_ = getDefaultInstance().getParent(); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The migration task of the subtasks to list.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for parent to set. + * @return This builder for chaining. + */ + public Builder setParentBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + parent_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.FieldMask readMask_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder> + readMaskBuilder_; + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the readMask field is set. + */ + public boolean hasReadMask() { + return readMaskBuilder_ != null || readMask_ != null; + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The readMask. + */ + public com.google.protobuf.FieldMask getReadMask() { + if (readMaskBuilder_ == null) { + return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; + } else { + return readMaskBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder setReadMask(com.google.protobuf.FieldMask value) { + if (readMaskBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + readMask_ = value; + onChanged(); + } else { + readMaskBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder setReadMask(com.google.protobuf.FieldMask.Builder builderForValue) { + if (readMaskBuilder_ == null) { + readMask_ = builderForValue.build(); + onChanged(); + } else { + readMaskBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder mergeReadMask(com.google.protobuf.FieldMask value) { + if (readMaskBuilder_ == null) { + if (readMask_ != null) { + readMask_ = + com.google.protobuf.FieldMask.newBuilder(readMask_).mergeFrom(value).buildPartial(); + } else { + readMask_ = value; + } + onChanged(); + } else { + readMaskBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder clearReadMask() { + if (readMaskBuilder_ == null) { + readMask_ = null; + onChanged(); + } else { + readMask_ = null; + readMaskBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public com.google.protobuf.FieldMask.Builder getReadMaskBuilder() { + + onChanged(); + return getReadMaskFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() { + if (readMaskBuilder_ != null) { + return readMaskBuilder_.getMessageOrBuilder(); + } else { + return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; + } + } + /** + * + * + *
+     * Optional. The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder> + getReadMaskFieldBuilder() { + if (readMaskBuilder_ == null) { + readMaskBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder>( + getReadMask(), getParentForChildren(), isClean()); + readMask_ = null; + } + return readMaskBuilder_; + } + + private int pageSize_; + /** + * + * + *
+     * Optional. The maximum number of migration tasks to return. The service may return
+     * fewer than this number.
+     * 
+ * + * int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The pageSize. + */ + @java.lang.Override + public int getPageSize() { + return pageSize_; + } + /** + * + * + *
+     * Optional. The maximum number of migration tasks to return. The service may return
+     * fewer than this number.
+     * 
+ * + * int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * @param value The pageSize to set. + * @return This builder for chaining. + */ + public Builder setPageSize(int value) { + + pageSize_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Optional. The maximum number of migration tasks to return. The service may return
+     * fewer than this number.
+     * 
+ * + * int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return This builder for chaining. + */ + public Builder clearPageSize() { + + pageSize_ = 0; + onChanged(); + return this; + } + + private java.lang.Object pageToken_ = ""; + /** + * + * + *
+     * Optional. A page token, received from previous `ListMigrationSubtasks` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListMigrationSubtasks`
+     * must match the call that provided the page token.
+     * 
+ * + * string page_token = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The pageToken. + */ + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Optional. A page token, received from previous `ListMigrationSubtasks` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListMigrationSubtasks`
+     * must match the call that provided the page token.
+     * 
+ * + * string page_token = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The bytes for pageToken. + */ + public com.google.protobuf.ByteString getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Optional. A page token, received from previous `ListMigrationSubtasks` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListMigrationSubtasks`
+     * must match the call that provided the page token.
+     * 
+ * + * string page_token = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + * @param value The pageToken to set. + * @return This builder for chaining. + */ + public Builder setPageToken(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + pageToken_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Optional. A page token, received from previous `ListMigrationSubtasks` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListMigrationSubtasks`
+     * must match the call that provided the page token.
+     * 
+ * + * string page_token = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return This builder for chaining. + */ + public Builder clearPageToken() { + + pageToken_ = getDefaultInstance().getPageToken(); + onChanged(); + return this; + } + /** + * + * + *
+     * Optional. A page token, received from previous `ListMigrationSubtasks` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListMigrationSubtasks`
+     * must match the call that provided the page token.
+     * 
+ * + * string page_token = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + * @param value The bytes for pageToken to set. + * @return This builder for chaining. + */ + public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + pageToken_ = value; + onChanged(); + return this; + } + + private java.lang.Object filter_ = ""; + /** + * + * + *
+     * Optional. The filter to apply. This can be used to get the subtasks of a specific
+     * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+     * task ID (not the name in the named map).
+     * 
+ * + * string filter = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The filter. + */ + public java.lang.String getFilter() { + java.lang.Object ref = filter_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + filter_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Optional. The filter to apply. This can be used to get the subtasks of a specific
+     * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+     * task ID (not the name in the named map).
+     * 
+ * + * string filter = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The bytes for filter. + */ + public com.google.protobuf.ByteString getFilterBytes() { + java.lang.Object ref = filter_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + filter_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Optional. The filter to apply. This can be used to get the subtasks of a specific
+     * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+     * task ID (not the name in the named map).
+     * 
+ * + * string filter = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + * @param value The filter to set. + * @return This builder for chaining. + */ + public Builder setFilter(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + filter_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Optional. The filter to apply. This can be used to get the subtasks of a specific
+     * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+     * task ID (not the name in the named map).
+     * 
+ * + * string filter = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return This builder for chaining. + */ + public Builder clearFilter() { + + filter_ = getDefaultInstance().getFilter(); + onChanged(); + return this; + } + /** + * + * + *
+     * Optional. The filter to apply. This can be used to get the subtasks of a specific
+     * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+     * task ID (not the name in the named map).
+     * 
+ * + * string filter = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + * @param value The bytes for filter to set. + * @return This builder for chaining. + */ + public Builder setFilterBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + filter_ = value; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest) + private static final com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListMigrationSubtasksRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListMigrationSubtasksRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + 
public com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationSubtasksRequestOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationSubtasksRequestOrBuilder.java new file mode 100644 index 0000000..a33eaac --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationSubtasksRequestOrBuilder.java @@ -0,0 +1,168 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface ListMigrationSubtasksRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The migration task of the subtasks to list.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + java.lang.String getParent(); + /** + * + * + *
+   * Required. The migration task of the subtasks to list.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + com.google.protobuf.ByteString getParentBytes(); + + /** + * + * + *
+   * Optional. The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the readMask field is set. + */ + boolean hasReadMask(); + /** + * + * + *
+   * Optional. The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The readMask. + */ + com.google.protobuf.FieldMask getReadMask(); + /** + * + * + *
+   * Optional. The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder(); + + /** + * + * + *
+   * Optional. The maximum number of migration tasks to return. The service may return
+   * fewer than this number.
+   * 
+ * + * int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The pageSize. + */ + int getPageSize(); + + /** + * + * + *
+   * Optional. A page token, received from previous `ListMigrationSubtasks` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListMigrationSubtasks`
+   * must match the call that provided the page token.
+   * 
+ * + * string page_token = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The pageToken. + */ + java.lang.String getPageToken(); + /** + * + * + *
+   * Optional. A page token, received from previous `ListMigrationSubtasks` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListMigrationSubtasks`
+   * must match the call that provided the page token.
+   * 
+ * + * string page_token = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The bytes for pageToken. + */ + com.google.protobuf.ByteString getPageTokenBytes(); + + /** + * + * + *
+   * Optional. The filter to apply. This can be used to get the subtasks of a specific
+   * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+   * task ID (not the name in the named map).
+   * 
+ * + * string filter = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The filter. + */ + java.lang.String getFilter(); + /** + * + * + *
+   * Optional. The filter to apply. This can be used to get the subtasks of a specific
+   * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+   * task ID (not the name in the named map).
+   * 
+ * + * string filter = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + * @return The bytes for filter. + */ + com.google.protobuf.ByteString getFilterBytes(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationSubtasksResponse.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationSubtasksResponse.java new file mode 100644 index 0000000..5baaa3e --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationSubtasksResponse.java @@ -0,0 +1,1232 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * Response object for a `ListMigrationSubtasks` call.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse} + */ +public final class ListMigrationSubtasksResponse extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse) + ListMigrationSubtasksResponseOrBuilder { + private static final long serialVersionUID = 0L; + // Use ListMigrationSubtasksResponse.newBuilder() to construct. + private ListMigrationSubtasksResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ListMigrationSubtasksResponse() { + migrationSubtasks_ = java.util.Collections.emptyList(); + nextPageToken_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ListMigrationSubtasksResponse(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private ListMigrationSubtasksResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + migrationSubtasks_ = + new java.util.ArrayList< + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask>(); + mutable_bitField0_ |= 0x00000001; + } + migrationSubtasks_.add( + input.readMessage( + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.parser(), + 
extensionRegistry)); + break; + } + case 18: + { + java.lang.String s = input.readStringRequireUtf8(); + + nextPageToken_ = s; + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) != 0)) { + migrationSubtasks_ = java.util.Collections.unmodifiableList(migrationSubtasks_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationSubtasksResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationSubtasksResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse.class, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse.Builder + .class); + } + + public static final int MIGRATION_SUBTASKS_FIELD_NUMBER = 1; + private java.util.List + migrationSubtasks_; + /** + * + * + *
+   * The migration subtasks for the specified task.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + @java.lang.Override + public java.util.List + getMigrationSubtasksList() { + return migrationSubtasks_; + } + /** + * + * + *
+   * The migration subtasks for the specified task.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + @java.lang.Override + public java.util.List< + ? extends com.google.cloud.bigquery.migration.v2alpha.MigrationSubtaskOrBuilder> + getMigrationSubtasksOrBuilderList() { + return migrationSubtasks_; + } + /** + * + * + *
+   * The migration subtasks for the specified task.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + @java.lang.Override + public int getMigrationSubtasksCount() { + return migrationSubtasks_.size(); + } + /** + * + * + *
+   * The migration subtasks for the specified task.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask getMigrationSubtasks( + int index) { + return migrationSubtasks_.get(index); + } + /** + * + * + *
+   * The migration subtasks for the specified task.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtaskOrBuilder + getMigrationSubtasksOrBuilder(int index) { + return migrationSubtasks_.get(index); + } + + public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; + private volatile java.lang.Object nextPageToken_; + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + @java.lang.Override + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } + } + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + for (int i = 0; i < migrationSubtasks_.size(); i++) { + output.writeMessage(1, migrationSubtasks_.get(i)); + } + if (!getNextPageTokenBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < migrationSubtasks_.size(); i++) { + size += + com.google.protobuf.CodedOutputStream.computeMessageSize(1, migrationSubtasks_.get(i)); + } + if (!getNextPageTokenBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj + instanceof com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse other = 
+ (com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse) obj; + + if (!getMigrationSubtasksList().equals(other.getMigrationSubtasksList())) return false; + if (!getNextPageToken().equals(other.getNextPageToken())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getMigrationSubtasksCount() > 0) { + hash = (37 * hash) + MIGRATION_SUBTASKS_FIELD_NUMBER; + hash = (53 * hash) + getMigrationSubtasksList().hashCode(); + } + hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getNextPageToken().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static 
com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static 
com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Response object for a `ListMigrationSubtasks` call.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse) + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationSubtasksResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationSubtasksResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse.class, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse.Builder + .class); + } + + // Construct using + // com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + getMigrationSubtasksFieldBuilder(); + } + } + + @java.lang.Override + public Builder clear() { + super.clear(); + if (migrationSubtasksBuilder_ == null) { + migrationSubtasks_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + migrationSubtasksBuilder_.clear(); + } + nextPageToken_ = ""; + + return 
this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationSubtasksResponse_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse + getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse build() { + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse + buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse result = + new com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse(this); + int from_bitField0_ = bitField0_; + if (migrationSubtasksBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + migrationSubtasks_ = java.util.Collections.unmodifiableList(migrationSubtasks_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.migrationSubtasks_ = migrationSubtasks_; + } else { + result.migrationSubtasks_ = migrationSubtasksBuilder_.build(); + } + result.nextPageToken_ = nextPageToken_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return 
super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse) { + return mergeFrom( + (com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse other) { + if (other + == com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse + .getDefaultInstance()) return this; + if (migrationSubtasksBuilder_ == null) { + if (!other.migrationSubtasks_.isEmpty()) { + if (migrationSubtasks_.isEmpty()) { + migrationSubtasks_ = other.migrationSubtasks_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureMigrationSubtasksIsMutable(); + migrationSubtasks_.addAll(other.migrationSubtasks_); + } + onChanged(); + } + } else { + if (!other.migrationSubtasks_.isEmpty()) { + if (migrationSubtasksBuilder_.isEmpty()) { + migrationSubtasksBuilder_.dispose(); + migrationSubtasksBuilder_ = null; + migrationSubtasks_ = other.migrationSubtasks_; + bitField0_ = (bitField0_ & ~0x00000001); + migrationSubtasksBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? 
getMigrationSubtasksFieldBuilder() + : null; + } else { + migrationSubtasksBuilder_.addAllMessages(other.migrationSubtasks_); + } + } + } + if (!other.getNextPageToken().isEmpty()) { + nextPageToken_ = other.nextPageToken_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse parsedMessage = + null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int bitField0_; + + private java.util.List + migrationSubtasks_ = java.util.Collections.emptyList(); + + private void ensureMigrationSubtasksIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + migrationSubtasks_ = + new java.util.ArrayList( + migrationSubtasks_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.Builder, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtaskOrBuilder> + migrationSubtasksBuilder_; + + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public java.util.List + getMigrationSubtasksList() { + if (migrationSubtasksBuilder_ == null) { + return java.util.Collections.unmodifiableList(migrationSubtasks_); + } else { + return migrationSubtasksBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public int getMigrationSubtasksCount() { + if (migrationSubtasksBuilder_ == null) { + return migrationSubtasks_.size(); + } else { + return migrationSubtasksBuilder_.getCount(); + } + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask getMigrationSubtasks( + int index) { + if (migrationSubtasksBuilder_ == null) { + return migrationSubtasks_.get(index); + } else { + return migrationSubtasksBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public Builder setMigrationSubtasks( + int index, com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask value) { + if (migrationSubtasksBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMigrationSubtasksIsMutable(); + migrationSubtasks_.set(index, value); + onChanged(); + } else { + migrationSubtasksBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public Builder setMigrationSubtasks( + int index, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.Builder builderForValue) { + if (migrationSubtasksBuilder_ == null) { + ensureMigrationSubtasksIsMutable(); + migrationSubtasks_.set(index, builderForValue.build()); + onChanged(); + } else { + migrationSubtasksBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public Builder addMigrationSubtasks( + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask value) { + if (migrationSubtasksBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMigrationSubtasksIsMutable(); + migrationSubtasks_.add(value); + onChanged(); + } else { + migrationSubtasksBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public Builder addMigrationSubtasks( + int index, com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask value) { + if (migrationSubtasksBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMigrationSubtasksIsMutable(); + migrationSubtasks_.add(index, value); + onChanged(); + } else { + migrationSubtasksBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public Builder addMigrationSubtasks( + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.Builder builderForValue) { + if (migrationSubtasksBuilder_ == null) { + ensureMigrationSubtasksIsMutable(); + migrationSubtasks_.add(builderForValue.build()); + onChanged(); + } else { + migrationSubtasksBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public Builder addMigrationSubtasks( + int index, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.Builder builderForValue) { + if (migrationSubtasksBuilder_ == null) { + ensureMigrationSubtasksIsMutable(); + migrationSubtasks_.add(index, builderForValue.build()); + onChanged(); + } else { + migrationSubtasksBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public Builder addAllMigrationSubtasks( + java.lang.Iterable + values) { + if (migrationSubtasksBuilder_ == null) { + ensureMigrationSubtasksIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, migrationSubtasks_); + onChanged(); + } else { + migrationSubtasksBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public Builder clearMigrationSubtasks() { + if (migrationSubtasksBuilder_ == null) { + migrationSubtasks_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + migrationSubtasksBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public Builder removeMigrationSubtasks(int index) { + if (migrationSubtasksBuilder_ == null) { + ensureMigrationSubtasksIsMutable(); + migrationSubtasks_.remove(index); + onChanged(); + } else { + migrationSubtasksBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.Builder + getMigrationSubtasksBuilder(int index) { + return getMigrationSubtasksFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtaskOrBuilder + getMigrationSubtasksOrBuilder(int index) { + if (migrationSubtasksBuilder_ == null) { + return migrationSubtasks_.get(index); + } else { + return migrationSubtasksBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public java.util.List< + ? extends com.google.cloud.bigquery.migration.v2alpha.MigrationSubtaskOrBuilder> + getMigrationSubtasksOrBuilderList() { + if (migrationSubtasksBuilder_ != null) { + return migrationSubtasksBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(migrationSubtasks_); + } + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.Builder + addMigrationSubtasksBuilder() { + return getMigrationSubtasksFieldBuilder() + .addBuilder( + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.getDefaultInstance()); + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.Builder + addMigrationSubtasksBuilder(int index) { + return getMigrationSubtasksFieldBuilder() + .addBuilder( + index, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.getDefaultInstance()); + } + /** + * + * + *
+     * The migration subtasks for the specified task.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + public java.util.List + getMigrationSubtasksBuilderList() { + return getMigrationSubtasksFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.Builder, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtaskOrBuilder> + getMigrationSubtasksFieldBuilder() { + if (migrationSubtasksBuilder_ == null) { + migrationSubtasksBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.Builder, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtaskOrBuilder>( + migrationSubtasks_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + migrationSubtasks_ = null; + } + return migrationSubtasksBuilder_; + } + + private java.lang.Object nextPageToken_ = ""; + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + public com.google.protobuf.ByteString getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @param value The nextPageToken to set. + * @return This builder for chaining. + */ + public Builder setNextPageToken(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + nextPageToken_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return This builder for chaining. + */ + public Builder clearNextPageToken() { + + nextPageToken_ = getDefaultInstance().getNextPageToken(); + onChanged(); + return this; + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @param value The bytes for nextPageToken to set. + * @return This builder for chaining. + */ + public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + nextPageToken_ = value; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse) + private static final com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListMigrationSubtasksResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListMigrationSubtasksResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public 
com.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationSubtasksResponseOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationSubtasksResponseOrBuilder.java new file mode 100644 index 0000000..9fd052a --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationSubtasksResponseOrBuilder.java @@ -0,0 +1,116 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface ListMigrationSubtasksResponseOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * The migration subtasks for the specified task.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + java.util.List + getMigrationSubtasksList(); + /** + * + * + *
+   * The migration subtasks for the specified task.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask getMigrationSubtasks(int index); + /** + * + * + *
+   * The migration subtasks for the specified task.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + int getMigrationSubtasksCount(); + /** + * + * + *
+   * The migration subtasks for the specified task.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + java.util.List + getMigrationSubtasksOrBuilderList(); + /** + * + * + *
+   * The migration subtasks for the specified task.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationSubtask migration_subtasks = 1; + * + */ + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtaskOrBuilder + getMigrationSubtasksOrBuilder(int index); + + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + java.lang.String getNextPageToken(); + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + com.google.protobuf.ByteString getNextPageTokenBytes(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationWorkflowsRequest.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationWorkflowsRequest.java new file mode 100644 index 0000000..54e8c24 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationWorkflowsRequest.java @@ -0,0 +1,1239 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * A request to list previously created migration workflows.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest} + */ +public final class ListMigrationWorkflowsRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest) + ListMigrationWorkflowsRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ListMigrationWorkflowsRequest.newBuilder() to construct. + private ListMigrationWorkflowsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ListMigrationWorkflowsRequest() { + parent_ = ""; + pageToken_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ListMigrationWorkflowsRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private ListMigrationWorkflowsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + parent_ = s; + break; + } + case 18: + { + com.google.protobuf.FieldMask.Builder subBuilder = null; + if (readMask_ != null) { + subBuilder = readMask_.toBuilder(); + } + readMask_ = + input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(readMask_); + readMask_ = 
subBuilder.buildPartial(); + } + + break; + } + case 24: + { + pageSize_ = input.readInt32(); + break; + } + case 34: + { + java.lang.String s = input.readStringRequireUtf8(); + + pageToken_ = s; + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationWorkflowsRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationWorkflowsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest.class, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest.Builder + .class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + private volatile java.lang.Object parent_; + /** + * + * + *
+   * Required. The project and location of the migration workflows to list.
+   * Example: `projects/123/locations/us`
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + @java.lang.Override + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The project and location of the migration workflows to list.
+   * Example: `projects/123/locations/us`
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int READ_MASK_FIELD_NUMBER = 2; + private com.google.protobuf.FieldMask readMask_; + /** + * + * + *
+   * The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + * + * @return Whether the readMask field is set. + */ + @java.lang.Override + public boolean hasReadMask() { + return readMask_ != null; + } + /** + * + * + *
+   * The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + * + * @return The readMask. + */ + @java.lang.Override + public com.google.protobuf.FieldMask getReadMask() { + return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; + } + /** + * + * + *
+   * The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + @java.lang.Override + public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() { + return getReadMask(); + } + + public static final int PAGE_SIZE_FIELD_NUMBER = 3; + private int pageSize_; + /** + * + * + *
+   * The maximum number of migration workflows to return. The service may return
+   * fewer than this number.
+   * 
+ * + * int32 page_size = 3; + * + * @return The pageSize. + */ + @java.lang.Override + public int getPageSize() { + return pageSize_; + } + + public static final int PAGE_TOKEN_FIELD_NUMBER = 4; + private volatile java.lang.Object pageToken_; + /** + * + * + *
+   * A page token, received from previous `ListMigrationWorkflows` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListMigrationWorkflows`
+   * must match the call that provided the page token.
+   * 
+ * + * string page_token = 4; + * + * @return The pageToken. + */ + @java.lang.Override + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } + } + /** + * + * + *
+   * A page token, received from previous `ListMigrationWorkflows` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListMigrationWorkflows`
+   * must match the call that provided the page token.
+   * 
+ * + * string page_token = 4; + * + * @return The bytes for pageToken. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getParentBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (readMask_ != null) { + output.writeMessage(2, getReadMask()); + } + if (pageSize_ != 0) { + output.writeInt32(3, pageSize_); + } + if (!getPageTokenBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getParentBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (readMask_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getReadMask()); + } + if (pageSize_ != 0) { + size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_); + } + if (!getPageTokenBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean 
equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj + instanceof com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest other = + (com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest) obj; + + if (!getParent().equals(other.getParent())) return false; + if (hasReadMask() != other.hasReadMask()) return false; + if (hasReadMask()) { + if (!getReadMask().equals(other.getReadMask())) return false; + } + if (getPageSize() != other.getPageSize()) return false; + if (!getPageToken().equals(other.getPageToken())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + if (hasReadMask()) { + hash = (37 * hash) + READ_MASK_FIELD_NUMBER; + hash = (53 * hash) + getReadMask().hashCode(); + } + hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; + hash = (53 * hash) + getPageSize(); + hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getPageToken().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, 
extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static 
com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * A request to list previously created migration workflows.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest) + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationWorkflowsRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationWorkflowsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest.class, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest.Builder + .class); + } + + // Construct using + // com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + parent_ = ""; + + if (readMaskBuilder_ == null) { + readMask_ = null; + } else { + readMask_ = null; + readMaskBuilder_ = null; + } + pageSize_ = 0; + + pageToken_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationWorkflowsRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest build() { + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest + buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest result = + new com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest(this); + result.parent_ = parent_; + if (readMaskBuilder_ == null) { + result.readMask_ = readMask_; + } else { + result.readMask_ = readMaskBuilder_.build(); + } + result.pageSize_ = pageSize_; + result.pageToken_ = pageToken_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor 
field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest) { + return mergeFrom( + (com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest other) { + if (other + == com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest + .getDefaultInstance()) return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + onChanged(); + } + if (other.hasReadMask()) { + mergeReadMask(other.getReadMask()); + } + if (other.getPageSize() != 0) { + setPageSize(other.getPageSize()); + } + if (!other.getPageToken().isEmpty()) { + pageToken_ = other.pageToken_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest parsedMessage = + null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if 
(parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object parent_ = ""; + /** + * + * + *
+     * Required. The project and location of the migration workflows to list.
+     * Example: `projects/123/locations/us`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The project and location of the migration workflows to list.
+     * Example: `projects/123/locations/us`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The project and location of the migration workflows to list.
+     * Example: `projects/123/locations/us`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The parent to set. + * @return This builder for chaining. + */ + public Builder setParent(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + parent_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The project and location of the migration workflows to list.
+     * Example: `projects/123/locations/us`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearParent() { + + parent_ = getDefaultInstance().getParent(); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The project and location of the migration workflows to list.
+     * Example: `projects/123/locations/us`
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for parent to set. + * @return This builder for chaining. + */ + public Builder setParentBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + parent_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.FieldMask readMask_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder> + readMaskBuilder_; + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + * + * @return Whether the readMask field is set. + */ + public boolean hasReadMask() { + return readMaskBuilder_ != null || readMask_ != null; + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + * + * @return The readMask. + */ + public com.google.protobuf.FieldMask getReadMask() { + if (readMaskBuilder_ == null) { + return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; + } else { + return readMaskBuilder_.getMessage(); + } + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + public Builder setReadMask(com.google.protobuf.FieldMask value) { + if (readMaskBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + readMask_ = value; + onChanged(); + } else { + readMaskBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + public Builder setReadMask(com.google.protobuf.FieldMask.Builder builderForValue) { + if (readMaskBuilder_ == null) { + readMask_ = builderForValue.build(); + onChanged(); + } else { + readMaskBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + public Builder mergeReadMask(com.google.protobuf.FieldMask value) { + if (readMaskBuilder_ == null) { + if (readMask_ != null) { + readMask_ = + com.google.protobuf.FieldMask.newBuilder(readMask_).mergeFrom(value).buildPartial(); + } else { + readMask_ = value; + } + onChanged(); + } else { + readMaskBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + public Builder clearReadMask() { + if (readMaskBuilder_ == null) { + readMask_ = null; + onChanged(); + } else { + readMask_ = null; + readMaskBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + public com.google.protobuf.FieldMask.Builder getReadMaskBuilder() { + + onChanged(); + return getReadMaskFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + public com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder() { + if (readMaskBuilder_ != null) { + return readMaskBuilder_.getMessageOrBuilder(); + } else { + return readMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : readMask_; + } + } + /** + * + * + *
+     * The list of fields to be retrieved.
+     * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder> + getReadMaskFieldBuilder() { + if (readMaskBuilder_ == null) { + readMaskBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder>( + getReadMask(), getParentForChildren(), isClean()); + readMask_ = null; + } + return readMaskBuilder_; + } + + private int pageSize_; + /** + * + * + *
+     * The maximum number of migration workflows to return. The service may return
+     * fewer than this number.
+     * 
+ * + * int32 page_size = 3; + * + * @return The pageSize. + */ + @java.lang.Override + public int getPageSize() { + return pageSize_; + } + /** + * + * + *
+     * The maximum number of migration workflows to return. The service may return
+     * fewer than this number.
+     * 
+ * + * int32 page_size = 3; + * + * @param value The pageSize to set. + * @return This builder for chaining. + */ + public Builder setPageSize(int value) { + + pageSize_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * The maximum number of migration workflows to return. The service may return
+     * fewer than this number.
+     * 
+ * + * int32 page_size = 3; + * + * @return This builder for chaining. + */ + public Builder clearPageSize() { + + pageSize_ = 0; + onChanged(); + return this; + } + + private java.lang.Object pageToken_ = ""; + /** + * + * + *
+     * A page token, received from previous `ListMigrationWorkflows` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListMigrationWorkflows`
+     * must match the call that provided the page token.
+     * 
+ * + * string page_token = 4; + * + * @return The pageToken. + */ + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * A page token, received from previous `ListMigrationWorkflows` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListMigrationWorkflows`
+     * must match the call that provided the page token.
+     * 
+ * + * string page_token = 4; + * + * @return The bytes for pageToken. + */ + public com.google.protobuf.ByteString getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * A page token, received from previous `ListMigrationWorkflows` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListMigrationWorkflows`
+     * must match the call that provided the page token.
+     * 
+ * + * string page_token = 4; + * + * @param value The pageToken to set. + * @return This builder for chaining. + */ + public Builder setPageToken(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + pageToken_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * A page token, received from previous `ListMigrationWorkflows` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListMigrationWorkflows`
+     * must match the call that provided the page token.
+     * 
+ * + * string page_token = 4; + * + * @return This builder for chaining. + */ + public Builder clearPageToken() { + + pageToken_ = getDefaultInstance().getPageToken(); + onChanged(); + return this; + } + /** + * + * + *
+     * A page token, received from previous `ListMigrationWorkflows` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListMigrationWorkflows`
+     * must match the call that provided the page token.
+     * 
+ * + * string page_token = 4; + * + * @param value The bytes for pageToken to set. + * @return This builder for chaining. + */ + public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + pageToken_ = value; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest) + private static final com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListMigrationWorkflowsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListMigrationWorkflowsRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public 
com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationWorkflowsRequestOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationWorkflowsRequestOrBuilder.java new file mode 100644 index 0000000..d040d45 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationWorkflowsRequestOrBuilder.java @@ -0,0 +1,136 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface ListMigrationWorkflowsRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The project and location of the migration workflows to list.
+   * Example: `projects/123/locations/us`
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + java.lang.String getParent(); + /** + * + * + *
+   * Required. The project and location of the migration workflows to list.
+   * Example: `projects/123/locations/us`
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + com.google.protobuf.ByteString getParentBytes(); + + /** + * + * + *
+   * The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + * + * @return Whether the readMask field is set. + */ + boolean hasReadMask(); + /** + * + * + *
+   * The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + * + * @return The readMask. + */ + com.google.protobuf.FieldMask getReadMask(); + /** + * + * + *
+   * The list of fields to be retrieved.
+   * 
+ * + * .google.protobuf.FieldMask read_mask = 2; + */ + com.google.protobuf.FieldMaskOrBuilder getReadMaskOrBuilder(); + + /** + * + * + *
+   * The maximum number of migration workflows to return. The service may return
+   * fewer than this number.
+   * 
+ * + * int32 page_size = 3; + * + * @return The pageSize. + */ + int getPageSize(); + + /** + * + * + *
+   * A page token, received from previous `ListMigrationWorkflows` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListMigrationWorkflows`
+   * must match the call that provided the page token.
+   * 
+ * + * string page_token = 4; + * + * @return The pageToken. + */ + java.lang.String getPageToken(); + /** + * + * + *
+   * A page token, received from previous `ListMigrationWorkflows` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListMigrationWorkflows`
+   * must match the call that provided the page token.
+   * 
+ * + * string page_token = 4; + * + * @return The bytes for pageToken. + */ + com.google.protobuf.ByteString getPageTokenBytes(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationWorkflowsResponse.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationWorkflowsResponse.java new file mode 100644 index 0000000..bc75c6e --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationWorkflowsResponse.java @@ -0,0 +1,1238 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * Response object for a `ListMigrationWorkflows` call.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse} + */ +public final class ListMigrationWorkflowsResponse extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse) + ListMigrationWorkflowsResponseOrBuilder { + private static final long serialVersionUID = 0L; + // Use ListMigrationWorkflowsResponse.newBuilder() to construct. + private ListMigrationWorkflowsResponse( + com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ListMigrationWorkflowsResponse() { + migrationWorkflows_ = java.util.Collections.emptyList(); + nextPageToken_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ListMigrationWorkflowsResponse(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private ListMigrationWorkflowsResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + migrationWorkflows_ = + new java.util.ArrayList< + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow>(); + mutable_bitField0_ |= 0x00000001; + } + migrationWorkflows_.add( + input.readMessage( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.parser(), + 
extensionRegistry)); + break; + } + case 18: + { + java.lang.String s = input.readStringRequireUtf8(); + + nextPageToken_ = s; + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) != 0)) { + migrationWorkflows_ = java.util.Collections.unmodifiableList(migrationWorkflows_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationWorkflowsResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationWorkflowsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse.class, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse.Builder + .class); + } + + public static final int MIGRATION_WORKFLOWS_FIELD_NUMBER = 1; + private java.util.List + migrationWorkflows_; + /** + * + * + *
+   * The migration workflows for the specified project / location.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + @java.lang.Override + public java.util.List + getMigrationWorkflowsList() { + return migrationWorkflows_; + } + /** + * + * + *
+   * The migration workflows for the specified project / location.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + @java.lang.Override + public java.util.List< + ? extends com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder> + getMigrationWorkflowsOrBuilderList() { + return migrationWorkflows_; + } + /** + * + * + *
+   * The migration workflows for the specified project / location.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + @java.lang.Override + public int getMigrationWorkflowsCount() { + return migrationWorkflows_.size(); + } + /** + * + * + *
+   * The migration workflows for the specified project / location.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow getMigrationWorkflows( + int index) { + return migrationWorkflows_.get(index); + } + /** + * + * + *
+   * The migration workflows for the specified project / location.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder + getMigrationWorkflowsOrBuilder(int index) { + return migrationWorkflows_.get(index); + } + + public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; + private volatile java.lang.Object nextPageToken_; + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + @java.lang.Override + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } + } + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + for (int i = 0; i < migrationWorkflows_.size(); i++) { + output.writeMessage(1, migrationWorkflows_.get(i)); + } + if (!getNextPageTokenBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < migrationWorkflows_.size(); i++) { + size += + com.google.protobuf.CodedOutputStream.computeMessageSize(1, migrationWorkflows_.get(i)); + } + if (!getNextPageTokenBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj + instanceof com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse 
other = + (com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse) obj; + + if (!getMigrationWorkflowsList().equals(other.getMigrationWorkflowsList())) return false; + if (!getNextPageToken().equals(other.getNextPageToken())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getMigrationWorkflowsCount() > 0) { + hash = (37 * hash) + MIGRATION_WORKFLOWS_FIELD_NUMBER; + hash = (53 * hash) + getMigrationWorkflowsList().hashCode(); + } + hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getNextPageToken().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + 
} + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + 
public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Response object for a `ListMigrationWorkflows` call.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse) + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationWorkflowsResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationWorkflowsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse.class, + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse.Builder + .class); + } + + // Construct using + // com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + getMigrationWorkflowsFieldBuilder(); + } + } + + @java.lang.Override + public Builder clear() { + super.clear(); + if (migrationWorkflowsBuilder_ == null) { + migrationWorkflows_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + migrationWorkflowsBuilder_.clear(); + } + nextPageToken_ = ""; + 
+ return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ListMigrationWorkflowsResponse_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse build() { + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse result = + new com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse(this); + int from_bitField0_ = bitField0_; + if (migrationWorkflowsBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + migrationWorkflows_ = java.util.Collections.unmodifiableList(migrationWorkflows_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.migrationWorkflows_ = migrationWorkflows_; + } else { + result.migrationWorkflows_ = migrationWorkflowsBuilder_.build(); + } + result.nextPageToken_ = nextPageToken_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse) { + return mergeFrom( + (com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse other) { + if (other + == com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + .getDefaultInstance()) return this; + if (migrationWorkflowsBuilder_ == null) { + if (!other.migrationWorkflows_.isEmpty()) { + if (migrationWorkflows_.isEmpty()) { + migrationWorkflows_ = other.migrationWorkflows_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureMigrationWorkflowsIsMutable(); + migrationWorkflows_.addAll(other.migrationWorkflows_); + } + onChanged(); + } + } else { + if (!other.migrationWorkflows_.isEmpty()) { + if (migrationWorkflowsBuilder_.isEmpty()) { + migrationWorkflowsBuilder_.dispose(); + migrationWorkflowsBuilder_ = null; + migrationWorkflows_ = other.migrationWorkflows_; + bitField0_ = (bitField0_ & ~0x00000001); + migrationWorkflowsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? 
getMigrationWorkflowsFieldBuilder() + : null; + } else { + migrationWorkflowsBuilder_.addAllMessages(other.migrationWorkflows_); + } + } + } + if (!other.getNextPageToken().isEmpty()) { + nextPageToken_ = other.nextPageToken_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse parsedMessage = + null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int bitField0_; + + private java.util.List + migrationWorkflows_ = java.util.Collections.emptyList(); + + private void ensureMigrationWorkflowsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + migrationWorkflows_ = + new java.util.ArrayList( + migrationWorkflows_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder> + migrationWorkflowsBuilder_; + + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public java.util.List + getMigrationWorkflowsList() { + if (migrationWorkflowsBuilder_ == null) { + return java.util.Collections.unmodifiableList(migrationWorkflows_); + } else { + return migrationWorkflowsBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public int getMigrationWorkflowsCount() { + if (migrationWorkflowsBuilder_ == null) { + return migrationWorkflows_.size(); + } else { + return migrationWorkflowsBuilder_.getCount(); + } + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow getMigrationWorkflows( + int index) { + if (migrationWorkflowsBuilder_ == null) { + return migrationWorkflows_.get(index); + } else { + return migrationWorkflowsBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public Builder setMigrationWorkflows( + int index, com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow value) { + if (migrationWorkflowsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMigrationWorkflowsIsMutable(); + migrationWorkflows_.set(index, value); + onChanged(); + } else { + migrationWorkflowsBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public Builder setMigrationWorkflows( + int index, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder builderForValue) { + if (migrationWorkflowsBuilder_ == null) { + ensureMigrationWorkflowsIsMutable(); + migrationWorkflows_.set(index, builderForValue.build()); + onChanged(); + } else { + migrationWorkflowsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public Builder addMigrationWorkflows( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow value) { + if (migrationWorkflowsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMigrationWorkflowsIsMutable(); + migrationWorkflows_.add(value); + onChanged(); + } else { + migrationWorkflowsBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public Builder addMigrationWorkflows( + int index, com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow value) { + if (migrationWorkflowsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMigrationWorkflowsIsMutable(); + migrationWorkflows_.add(index, value); + onChanged(); + } else { + migrationWorkflowsBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public Builder addMigrationWorkflows( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder builderForValue) { + if (migrationWorkflowsBuilder_ == null) { + ensureMigrationWorkflowsIsMutable(); + migrationWorkflows_.add(builderForValue.build()); + onChanged(); + } else { + migrationWorkflowsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public Builder addMigrationWorkflows( + int index, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder builderForValue) { + if (migrationWorkflowsBuilder_ == null) { + ensureMigrationWorkflowsIsMutable(); + migrationWorkflows_.add(index, builderForValue.build()); + onChanged(); + } else { + migrationWorkflowsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public Builder addAllMigrationWorkflows( + java.lang.Iterable + values) { + if (migrationWorkflowsBuilder_ == null) { + ensureMigrationWorkflowsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, migrationWorkflows_); + onChanged(); + } else { + migrationWorkflowsBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public Builder clearMigrationWorkflows() { + if (migrationWorkflowsBuilder_ == null) { + migrationWorkflows_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + migrationWorkflowsBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public Builder removeMigrationWorkflows(int index) { + if (migrationWorkflowsBuilder_ == null) { + ensureMigrationWorkflowsIsMutable(); + migrationWorkflows_.remove(index); + onChanged(); + } else { + migrationWorkflowsBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder + getMigrationWorkflowsBuilder(int index) { + return getMigrationWorkflowsFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder + getMigrationWorkflowsOrBuilder(int index) { + if (migrationWorkflowsBuilder_ == null) { + return migrationWorkflows_.get(index); + } else { + return migrationWorkflowsBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public java.util.List< + ? extends com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder> + getMigrationWorkflowsOrBuilderList() { + if (migrationWorkflowsBuilder_ != null) { + return migrationWorkflowsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(migrationWorkflows_); + } + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder + addMigrationWorkflowsBuilder() { + return getMigrationWorkflowsFieldBuilder() + .addBuilder( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.getDefaultInstance()); + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder + addMigrationWorkflowsBuilder(int index) { + return getMigrationWorkflowsFieldBuilder() + .addBuilder( + index, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.getDefaultInstance()); + } + /** + * + * + *
+     * The migration workflows for the specified project / location.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + public java.util.List + getMigrationWorkflowsBuilderList() { + return getMigrationWorkflowsFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder> + getMigrationWorkflowsFieldBuilder() { + if (migrationWorkflowsBuilder_ == null) { + migrationWorkflowsBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder>( + migrationWorkflows_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + migrationWorkflows_ = null; + } + return migrationWorkflowsBuilder_; + } + + private java.lang.Object nextPageToken_ = ""; + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + public com.google.protobuf.ByteString getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @param value The nextPageToken to set. + * @return This builder for chaining. + */ + public Builder setNextPageToken(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + nextPageToken_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return This builder for chaining. + */ + public Builder clearNextPageToken() { + + nextPageToken_ = getDefaultInstance().getNextPageToken(); + onChanged(); + return this; + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @param value The bytes for nextPageToken to set. + * @return This builder for chaining. + */ + public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + nextPageToken_ = value; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse) + private static final com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListMigrationWorkflowsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListMigrationWorkflowsResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + 
public com.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationWorkflowsResponseOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationWorkflowsResponseOrBuilder.java new file mode 100644 index 0000000..69b7076 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ListMigrationWorkflowsResponseOrBuilder.java @@ -0,0 +1,116 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface ListMigrationWorkflowsResponseOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * The migration workflows for the specified project / location.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + java.util.List + getMigrationWorkflowsList(); + /** + * + * + *
+   * The migration workflows for the specified project / location.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow getMigrationWorkflows(int index); + /** + * + * + *
+   * The migration workflows for the specified project / location.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + int getMigrationWorkflowsCount(); + /** + * + * + *
+   * The migration workflows for the specified project / location.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + java.util.List + getMigrationWorkflowsOrBuilderList(); + /** + * + * + *
+   * The migration workflows for the specified project / location.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow migration_workflows = 1; + * + */ + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder + getMigrationWorkflowsOrBuilder(int index); + + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + java.lang.String getNextPageToken(); + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + com.google.protobuf.ByteString getNextPageTokenBytes(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/LocationName.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/LocationName.java new file mode 100644 index 0000000..0d4aa14 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/LocationName.java @@ -0,0 +1,192 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.migration.v2alpha; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. 
+@Generated("by gapic-generator-java")
+public class LocationName implements ResourceName {
+  private static final PathTemplate PROJECT_LOCATION =
+      PathTemplate.createWithoutUrlEncoding("projects/{project}/locations/{location}");
+  private volatile Map<String, String> fieldValuesMap;
+  private final String project;
+  private final String location;
+
+  @Deprecated
+  protected LocationName() {
+    project = null;
+    location = null;
+  }
+
+  private LocationName(Builder builder) {
+    project = Preconditions.checkNotNull(builder.getProject());
+    location = Preconditions.checkNotNull(builder.getLocation());
+  }
+
+  public String getProject() {
+    return project;
+  }
+
+  public String getLocation() {
+    return location;
+  }
+
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  public Builder toBuilder() {
+    return new Builder(this);
+  }
+
+  public static LocationName of(String project, String location) {
+    return newBuilder().setProject(project).setLocation(location).build();
+  }
+
+  public static String format(String project, String location) {
+    return newBuilder().setProject(project).setLocation(location).build().toString();
+  }
+
+  public static LocationName parse(String formattedString) {
+    if (formattedString.isEmpty()) {
+      return null;
+    }
+    Map<String, String> matchMap =
+        PROJECT_LOCATION.validatedMatch(
+            formattedString, "LocationName.parse: formattedString not in valid format");
+    return of(matchMap.get("project"), matchMap.get("location"));
+  }
+
+  public static List<LocationName> parseList(List<String> formattedStrings) {
+    List<LocationName> list = new ArrayList<>(formattedStrings.size());
+    for (String formattedString : formattedStrings) {
+      list.add(parse(formattedString));
+    }
+    return list;
+  }
+
+  public static List<String> toStringList(List<LocationName> values) {
+    List<String> list = new ArrayList<>(values.size());
+    for (LocationName value : values) {
+      if (value == null) {
+        list.add("");
+      } else {
+        list.add(value.toString());
+      }
+    }
+    return list;
+  }
+
+  public static boolean isParsableFrom(String formattedString) {
+    return PROJECT_LOCATION.matches(formattedString);
+  }
+
+  @Override
+  public Map<String, String> getFieldValuesMap() {
+    if (fieldValuesMap == null) {
+      synchronized (this) {
+        if (fieldValuesMap == null) {
+          ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
+          if (project != null) {
+            fieldMapBuilder.put("project", project);
+          }
+          if (location != null) {
+            fieldMapBuilder.put("location", location);
+          }
+          fieldValuesMap = fieldMapBuilder.build();
+        }
+      }
+    }
+    return fieldValuesMap;
+  }
+
+  public String getFieldValue(String fieldName) {
+    return getFieldValuesMap().get(fieldName);
+  }
+
+  @Override
+  public String toString() {
+    return PROJECT_LOCATION.instantiate("project", project, "location", location);
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (o == this) {
+      return true;
+    }
+    if (o != null && getClass() == o.getClass()) {
+      LocationName that = ((LocationName) o);
+      return Objects.equals(this.project, that.project)
+          && Objects.equals(this.location, that.location);
+    }
+    return false;
+  }
+
+  @Override
+  public int hashCode() {
+    int h = 1;
+    h *= 1000003;
+    h ^= Objects.hashCode(project);
+    h *= 1000003;
+    h ^= Objects.hashCode(location);
+    return h;
+  }
+
+  /** Builder for projects/{project}/locations/{location}. */
+  public static class Builder {
+    private String project;
+    private String location;
+
+    protected Builder() {}
+
+    public String getProject() {
+      return project;
+    }
+
+    public String getLocation() {
+      return location;
+    }
+
+    public Builder setProject(String project) {
+      this.project = project;
+      return this;
+    }
+
+    public Builder setLocation(String location) {
+      this.location = location;
+      return this;
+    }
+
+    private Builder(LocationName locationName) {
+      this.project = locationName.project;
+      this.location = locationName.location;
+    }
+
+    public LocationName build() {
+      return new LocationName(this);
+    }
+  }
+}
diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationEntitiesProto.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationEntitiesProto.java
new file mode 100644
index 0000000..0b7f63b
--- /dev/null
+++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationEntitiesProto.java
@@ -0,0 +1,191 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: google/cloud/bigquery/migration/v2alpha/migration_entities.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public final class MigrationEntitiesProto { + private MigrationEntitiesProto() {} + + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} + + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); + } + + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_migration_v2alpha_MigrationWorkflow_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_migration_v2alpha_MigrationWorkflow_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_migration_v2alpha_MigrationWorkflow_TasksEntry_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_migration_v2alpha_MigrationWorkflow_TasksEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_migration_v2alpha_MigrationTask_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_migration_v2alpha_MigrationTask_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_migration_v2alpha_MigrationSubtask_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_migration_v2alpha_MigrationSubtask_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + return descriptor; + } + + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; + + static { + java.lang.String[] 
descriptorData = { + "\n@google/cloud/bigquery/migration/v2alph" + + "a/migration_entities.proto\022\'google.cloud" + + ".bigquery.migration.v2alpha\032\037google/api/" + + "field_behavior.proto\032\031google/api/resourc" + + "e.proto\032Egoogle/cloud/bigquery/migration" + + "/v2alpha/migration_error_details.proto\032?" + + "google/cloud/bigquery/migration/v2alpha/" + + "migration_metrics.proto\032\031google/protobuf" + + "/any.proto\032\037google/protobuf/timestamp.pr" + + "oto\032\036google/rpc/error_details.proto\"\202\005\n\021" + + "MigrationWorkflow\022\024\n\004name\030\001 \001(\tB\006\340A\003\340A\005\022" + + "\024\n\014display_name\030\006 \001(\t\022T\n\005tasks\030\002 \003(\0132E.g" + + "oogle.cloud.bigquery.migration.v2alpha.M" + + "igrationWorkflow.TasksEntry\022T\n\005state\030\003 \001" + + "(\0162@.google.cloud.bigquery.migration.v2a" + + "lpha.MigrationWorkflow.StateB\003\340A\003\022/\n\013cre" + + "ate_time\030\004 \001(\0132\032.google.protobuf.Timesta" + + "mp\0224\n\020last_update_time\030\005 \001(\0132\032.google.pr" + + "otobuf.Timestamp\032d\n\nTasksEntry\022\013\n\003key\030\001 " + + "\001(\t\022E\n\005value\030\002 \001(\01326.google.cloud.bigque" + + "ry.migration.v2alpha.MigrationTask:\0028\001\"Q" + + "\n\005State\022\025\n\021STATE_UNSPECIFIED\020\000\022\t\n\005DRAFT\020" + + "\001\022\013\n\007RUNNING\020\002\022\n\n\006PAUSED\020\003\022\r\n\tCOMPLETED\020" + + "\004:u\352Ar\n2bigquerymigration.googleapis.com" + + "/MigrationWorkflow\022 + * A subtask for a migration which carries details about the configuration of + * the subtask. The content of the details should not matter to the end user, + * but is a contract between the subtask creator and subtask worker. 
+ * + * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.MigrationSubtask} + */ +public final class MigrationSubtask extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.MigrationSubtask) + MigrationSubtaskOrBuilder { + private static final long serialVersionUID = 0L; + // Use MigrationSubtask.newBuilder() to construct. + private MigrationSubtask(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private MigrationSubtask() { + name_ = ""; + taskId_ = ""; + type_ = ""; + state_ = 0; + resourceErrorDetails_ = java.util.Collections.emptyList(); + metrics_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new MigrationSubtask(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private MigrationSubtask( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: + { + java.lang.String s = input.readStringRequireUtf8(); + + taskId_ = s; + break; + } + case 26: + { + java.lang.String s = input.readStringRequireUtf8(); + + type_ = s; + break; + } + case 40: + { + int rawValue = input.readEnum(); + + state_ = rawValue; + break; + } + case 
50: + { + com.google.rpc.ErrorInfo.Builder subBuilder = null; + if (processingError_ != null) { + subBuilder = processingError_.toBuilder(); + } + processingError_ = + input.readMessage(com.google.rpc.ErrorInfo.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(processingError_); + processingError_ = subBuilder.buildPartial(); + } + + break; + } + case 58: + { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (createTime_ != null) { + subBuilder = createTime_.toBuilder(); + } + createTime_ = + input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(createTime_); + createTime_ = subBuilder.buildPartial(); + } + + break; + } + case 66: + { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (lastUpdateTime_ != null) { + subBuilder = lastUpdateTime_.toBuilder(); + } + lastUpdateTime_ = + input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(lastUpdateTime_); + lastUpdateTime_ = subBuilder.buildPartial(); + } + + break; + } + case 90: + { + if (!((mutable_bitField0_ & 0x00000002) != 0)) { + metrics_ = + new java.util.ArrayList< + com.google.cloud.bigquery.migration.v2alpha.TimeSeries>(); + mutable_bitField0_ |= 0x00000002; + } + metrics_.add( + input.readMessage( + com.google.cloud.bigquery.migration.v2alpha.TimeSeries.parser(), + extensionRegistry)); + break; + } + case 98: + { + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + resourceErrorDetails_ = + new java.util.ArrayList< + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail>(); + mutable_bitField0_ |= 0x00000001; + } + resourceErrorDetails_.add( + input.readMessage( + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.parser(), + extensionRegistry)); + break; + } + case 104: + { + resourceErrorCount_ = input.readInt32(); + break; + } + default: + { + if (!parseUnknownField(input, 
unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) != 0)) { + metrics_ = java.util.Collections.unmodifiableList(metrics_); + } + if (((mutable_bitField0_ & 0x00000001) != 0)) { + resourceErrorDetails_ = java.util.Collections.unmodifiableList(resourceErrorDetails_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationSubtask_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationSubtask_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.class, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.Builder.class); + } + + /** + * + * + *
+   * Possible states of a migration subtask.
+   * 
+ * + * Protobuf enum {@code google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State} + */ + public enum State implements com.google.protobuf.ProtocolMessageEnum { + /** + * + * + *
+     * The state is unspecified.
+     * 
+ * + * STATE_UNSPECIFIED = 0; + */ + STATE_UNSPECIFIED(0), + /** + * + * + *
+     * The subtask is ready, i.e. it is ready for execution.
+     * 
+ * + * ACTIVE = 1; + */ + ACTIVE(1), + /** + * + * + *
+     * The subtask is running, i.e. it is assigned to a worker for execution.
+     * 
+ * + * RUNNING = 2; + */ + RUNNING(2), + /** + * + * + *
+     * The subtask finished successfully.
+     * 
+ * + * SUCCEEDED = 3; + */ + SUCCEEDED(3), + /** + * + * + *
+     * The subtask finished unsuccessfully.
+     * 
+ * + * FAILED = 4; + */ + FAILED(4), + /** + * + * + *
+     * The subtask is paused, i.e., it will not be scheduled. If it was already
+     * assigned,it might still finish but no new lease renewals will be granted.
+     * 
+ * + * PAUSED = 5; + */ + PAUSED(5), + UNRECOGNIZED(-1), + ; + + /** + * + * + *
+     * The state is unspecified.
+     * 
+ * + * STATE_UNSPECIFIED = 0; + */ + public static final int STATE_UNSPECIFIED_VALUE = 0; + /** + * + * + *
+     * The subtask is ready, i.e. it is ready for execution.
+     * 
+ * + * ACTIVE = 1; + */ + public static final int ACTIVE_VALUE = 1; + /** + * + * + *
+     * The subtask is running, i.e. it is assigned to a worker for execution.
+     * 
+ * + * RUNNING = 2; + */ + public static final int RUNNING_VALUE = 2; + /** + * + * + *
+     * The subtask finished successfully.
+     * 
+ * + * SUCCEEDED = 3; + */ + public static final int SUCCEEDED_VALUE = 3; + /** + * + * + *
+     * The subtask finished unsuccessfully.
+     * 
+ * + * FAILED = 4; + */ + public static final int FAILED_VALUE = 4; + /** + * + * + *
+     * The subtask is paused, i.e., it will not be scheduled. If it was already
+     * assigned,it might still finish but no new lease renewals will be granted.
+     * 
+ * + * PAUSED = 5; + */ + public static final int PAUSED_VALUE = 5; + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static State valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static State forNumber(int value) { + switch (value) { + case 0: + return STATE_UNSPECIFIED; + case 1: + return ACTIVE; + case 2: + return RUNNING; + case 3: + return SUCCEEDED; + case 4: + return FAILED; + case 5: + return PAUSED; + default: + return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { + return internalValueMap; + } + + private static final com.google.protobuf.Internal.EnumLiteMap internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public State findValueByNumber(int number) { + return State.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + + public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { + return getDescriptor(); + } + + public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.getDescriptor() + .getEnumTypes() + .get(0); + } + + private static final State[] VALUES = values(); 
+ + public static State valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private State(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State) + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + * + * + *
+   * Output only. Immutable. The resource name for the migration subtask. The ID is
+   * server-generated.
+   * Example: `projects/123/locations/us/workflows/345/subtasks/678`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Output only. Immutable. The resource name for the migration subtask. The ID is
+   * server-generated.
+   * Example: `projects/123/locations/us/workflows/345/subtasks/678`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TASK_ID_FIELD_NUMBER = 2; + private volatile java.lang.Object taskId_; + /** + * + * + *
+   * The unique ID of the task to which this subtask belongs.
+   * 
+ * + * string task_id = 2; + * + * @return The taskId. + */ + @java.lang.Override + public java.lang.String getTaskId() { + java.lang.Object ref = taskId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + taskId_ = s; + return s; + } + } + /** + * + * + *
+   * The unique ID of the task to which this subtask belongs.
+   * 
+ * + * string task_id = 2; + * + * @return The bytes for taskId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTaskIdBytes() { + java.lang.Object ref = taskId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + taskId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TYPE_FIELD_NUMBER = 3; + private volatile java.lang.Object type_; + /** + * + * + *
+   * The type of the Subtask. The migration service does not check whether this
+   * is a known type. It is up to the task creator (i.e. orchestrator or worker)
+   * to ensure it only creates subtasks for which there are compatible workers
+   * polling for Subtasks.
+   * 
+ * + * string type = 3; + * + * @return The type. + */ + @java.lang.Override + public java.lang.String getType() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + type_ = s; + return s; + } + } + /** + * + * + *
+   * The type of the Subtask. The migration service does not check whether this
+   * is a known type. It is up to the task creator (i.e. orchestrator or worker)
+   * to ensure it only creates subtasks for which there are compatible workers
+   * polling for Subtasks.
+   * 
+ * + * string type = 3; + * + * @return The bytes for type. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int STATE_FIELD_NUMBER = 5; + private int state_; + /** + * + * + *
+   * Output only. The current state of the subtask.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The enum numeric value on the wire for state. + */ + @java.lang.Override + public int getStateValue() { + return state_; + } + /** + * + * + *
+   * Output only. The current state of the subtask.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The state. + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State getState() { + @SuppressWarnings("deprecation") + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State result = + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State.valueOf(state_); + return result == null + ? com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State.UNRECOGNIZED + : result; + } + + public static final int PROCESSING_ERROR_FIELD_NUMBER = 6; + private com.google.rpc.ErrorInfo processingError_; + /** + * + * + *
+   * Output only. An explanation that may be populated when the task is in FAILED state.
+   * 
+ * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the processingError field is set. + */ + @java.lang.Override + public boolean hasProcessingError() { + return processingError_ != null; + } + /** + * + * + *
+   * Output only. An explanation that may be populated when the task is in FAILED state.
+   * 
+ * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The processingError. + */ + @java.lang.Override + public com.google.rpc.ErrorInfo getProcessingError() { + return processingError_ == null + ? com.google.rpc.ErrorInfo.getDefaultInstance() + : processingError_; + } + /** + * + * + *
+   * Output only. An explanation that may be populated when the task is in FAILED state.
+   * 
+ * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.rpc.ErrorInfoOrBuilder getProcessingErrorOrBuilder() { + return getProcessingError(); + } + + public static final int RESOURCE_ERROR_DETAILS_FIELD_NUMBER = 12; + private java.util.List + resourceErrorDetails_; + /** + * + * + *
+   * Output only. Provides details to errors and issues encountered while processing the
+   * subtask. Presence of error details does not mean that the subtask failed.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public java.util.List + getResourceErrorDetailsList() { + return resourceErrorDetails_; + } + /** + * + * + *
+   * Output only. Provides details to errors and issues encountered while processing the
+   * subtask. Presence of error details does not mean that the subtask failed.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public java.util.List< + ? extends com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetailOrBuilder> + getResourceErrorDetailsOrBuilderList() { + return resourceErrorDetails_; + } + /** + * + * + *
+   * Output only. Provides details to errors and issues encountered while processing the
+   * subtask. Presence of error details does not mean that the subtask failed.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public int getResourceErrorDetailsCount() { + return resourceErrorDetails_.size(); + } + /** + * + * + *
+   * Output only. Provides details to errors and issues encountered while processing the
+   * subtask. Presence of error details does not mean that the subtask failed.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail getResourceErrorDetails( + int index) { + return resourceErrorDetails_.get(index); + } + /** + * + * + *
+   * Output only. Provides details to errors and issues encountered while processing the
+   * subtask. Presence of error details does not mean that the subtask failed.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetailOrBuilder + getResourceErrorDetailsOrBuilder(int index) { + return resourceErrorDetails_.get(index); + } + + public static final int RESOURCE_ERROR_COUNT_FIELD_NUMBER = 13; + private int resourceErrorCount_; + /** + * + * + *
+   * The number or resources with errors. Note: This is not the total
+   * number of errors as each resource can have more than one error.
+   * This is used to indicate truncation by having a `resource_error_count`
+   * that is higher than the size of `resource_error_details`.
+   * 
+ * + * int32 resource_error_count = 13; + * + * @return The resourceErrorCount. + */ + @java.lang.Override + public int getResourceErrorCount() { + return resourceErrorCount_; + } + + public static final int CREATE_TIME_FIELD_NUMBER = 7; + private com.google.protobuf.Timestamp createTime_; + /** + * + * + *
+   * Time when the subtask was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 7; + * + * @return Whether the createTime field is set. + */ + @java.lang.Override + public boolean hasCreateTime() { + return createTime_ != null; + } + /** + * + * + *
+   * Time when the subtask was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 7; + * + * @return The createTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getCreateTime() { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + /** + * + * + *
+   * Time when the subtask was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 7; + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + return getCreateTime(); + } + + public static final int LAST_UPDATE_TIME_FIELD_NUMBER = 8; + private com.google.protobuf.Timestamp lastUpdateTime_; + /** + * + * + *
+   * Time when the subtask was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + * + * @return Whether the lastUpdateTime field is set. + */ + @java.lang.Override + public boolean hasLastUpdateTime() { + return lastUpdateTime_ != null; + } + /** + * + * + *
+   * Time when the subtask was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + * + * @return The lastUpdateTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getLastUpdateTime() { + return lastUpdateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : lastUpdateTime_; + } + /** + * + * + *
+   * Time when the subtask was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder() { + return getLastUpdateTime(); + } + + public static final int METRICS_FIELD_NUMBER = 11; + private java.util.List metrics_; + /** + * + * + *
+   * The metrics for the subtask.
+   * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + @java.lang.Override + public java.util.List getMetricsList() { + return metrics_; + } + /** + * + * + *
+   * The metrics for the subtask.
+   * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + @java.lang.Override + public java.util.List + getMetricsOrBuilderList() { + return metrics_; + } + /** + * + * + *
+   * The metrics for the subtask.
+   * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + @java.lang.Override + public int getMetricsCount() { + return metrics_.size(); + } + /** + * + * + *
+   * The metrics for the subtask.
+   * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TimeSeries getMetrics(int index) { + return metrics_.get(index); + } + /** + * + * + *
+   * The metrics for the subtask.
+   * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TimeSeriesOrBuilder getMetricsOrBuilder( + int index) { + return metrics_.get(index); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (!getTaskIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, taskId_); + } + if (!getTypeBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, type_); + } + if (state_ + != com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State.STATE_UNSPECIFIED + .getNumber()) { + output.writeEnum(5, state_); + } + if (processingError_ != null) { + output.writeMessage(6, getProcessingError()); + } + if (createTime_ != null) { + output.writeMessage(7, getCreateTime()); + } + if (lastUpdateTime_ != null) { + output.writeMessage(8, getLastUpdateTime()); + } + for (int i = 0; i < metrics_.size(); i++) { + output.writeMessage(11, metrics_.get(i)); + } + for (int i = 0; i < resourceErrorDetails_.size(); i++) { + output.writeMessage(12, resourceErrorDetails_.get(i)); + } + if (resourceErrorCount_ != 0) { + output.writeInt32(13, resourceErrorCount_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if 
(!getTaskIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, taskId_); + } + if (!getTypeBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, type_); + } + if (state_ + != com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State.STATE_UNSPECIFIED + .getNumber()) { + size += com.google.protobuf.CodedOutputStream.computeEnumSize(5, state_); + } + if (processingError_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(6, getProcessingError()); + } + if (createTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(7, getCreateTime()); + } + if (lastUpdateTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, getLastUpdateTime()); + } + for (int i = 0; i < metrics_.size(); i++) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(11, metrics_.get(i)); + } + for (int i = 0; i < resourceErrorDetails_.size(); i++) { + size += + com.google.protobuf.CodedOutputStream.computeMessageSize( + 12, resourceErrorDetails_.get(i)); + } + if (resourceErrorCount_ != 0) { + size += com.google.protobuf.CodedOutputStream.computeInt32Size(13, resourceErrorCount_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask other = + (com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) obj; + + if (!getName().equals(other.getName())) return false; + if (!getTaskId().equals(other.getTaskId())) return false; + if (!getType().equals(other.getType())) return false; + if (state_ != other.state_) return false; + if (hasProcessingError() != other.hasProcessingError()) 
return false; + if (hasProcessingError()) { + if (!getProcessingError().equals(other.getProcessingError())) return false; + } + if (!getResourceErrorDetailsList().equals(other.getResourceErrorDetailsList())) return false; + if (getResourceErrorCount() != other.getResourceErrorCount()) return false; + if (hasCreateTime() != other.hasCreateTime()) return false; + if (hasCreateTime()) { + if (!getCreateTime().equals(other.getCreateTime())) return false; + } + if (hasLastUpdateTime() != other.hasLastUpdateTime()) return false; + if (hasLastUpdateTime()) { + if (!getLastUpdateTime().equals(other.getLastUpdateTime())) return false; + } + if (!getMetricsList().equals(other.getMetricsList())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + TASK_ID_FIELD_NUMBER; + hash = (53 * hash) + getTaskId().hashCode(); + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + getType().hashCode(); + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + state_; + if (hasProcessingError()) { + hash = (37 * hash) + PROCESSING_ERROR_FIELD_NUMBER; + hash = (53 * hash) + getProcessingError().hashCode(); + } + if (getResourceErrorDetailsCount() > 0) { + hash = (37 * hash) + RESOURCE_ERROR_DETAILS_FIELD_NUMBER; + hash = (53 * hash) + getResourceErrorDetailsList().hashCode(); + } + hash = (37 * hash) + RESOURCE_ERROR_COUNT_FIELD_NUMBER; + hash = (53 * hash) + getResourceErrorCount(); + if (hasCreateTime()) { + hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getCreateTime().hashCode(); + } + if (hasLastUpdateTime()) { + hash = (37 * hash) + LAST_UPDATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + 
getLastUpdateTime().hashCode(); + } + if (getMetricsCount() > 0) { + hash = (37 * hash) + METRICS_FIELD_NUMBER; + hash = (53 * hash) + getMetricsList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parseFrom( + java.io.InputStream input) throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { 
+ return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * A subtask for a migration which carries details about the configuration of
+   * the subtask. The content of the details should not matter to the end user,
+   * but is a contract between the subtask creator and subtask worker.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.MigrationSubtask} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.MigrationSubtask) + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtaskOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationSubtask_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationSubtask_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.class, + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.Builder.class); + } + + // Construct using com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + getResourceErrorDetailsFieldBuilder(); + getMetricsFieldBuilder(); + } + } + + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + taskId_ = ""; + + type_ = ""; + + state_ = 0; + + if (processingErrorBuilder_ == null) { + processingError_ = null; + } else { + processingError_ = null; + processingErrorBuilder_ = null; + } + if (resourceErrorDetailsBuilder_ == null) { + resourceErrorDetails_ = 
java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + resourceErrorDetailsBuilder_.clear(); + } + resourceErrorCount_ = 0; + + if (createTimeBuilder_ == null) { + createTime_ = null; + } else { + createTime_ = null; + createTimeBuilder_ = null; + } + if (lastUpdateTimeBuilder_ == null) { + lastUpdateTime_ = null; + } else { + lastUpdateTime_ = null; + lastUpdateTimeBuilder_ = null; + } + if (metricsBuilder_ == null) { + metrics_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + metricsBuilder_.clear(); + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationSubtask_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask + getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask build() { + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask result = + new com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask(this); + int from_bitField0_ = bitField0_; + result.name_ = name_; + result.taskId_ = taskId_; + result.type_ = type_; + result.state_ = state_; + if (processingErrorBuilder_ == null) { + result.processingError_ = processingError_; + } else { + result.processingError_ = processingErrorBuilder_.build(); + } + if (resourceErrorDetailsBuilder_ == 
null) { + if (((bitField0_ & 0x00000001) != 0)) { + resourceErrorDetails_ = java.util.Collections.unmodifiableList(resourceErrorDetails_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.resourceErrorDetails_ = resourceErrorDetails_; + } else { + result.resourceErrorDetails_ = resourceErrorDetailsBuilder_.build(); + } + result.resourceErrorCount_ = resourceErrorCount_; + if (createTimeBuilder_ == null) { + result.createTime_ = createTime_; + } else { + result.createTime_ = createTimeBuilder_.build(); + } + if (lastUpdateTimeBuilder_ == null) { + result.lastUpdateTime_ = lastUpdateTime_; + } else { + result.lastUpdateTime_ = lastUpdateTimeBuilder_.build(); + } + if (metricsBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0)) { + metrics_ = java.util.Collections.unmodifiableList(metrics_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.metrics_ = metrics_; + } else { + result.metrics_ = metricsBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder 
mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) { + return mergeFrom((com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask other) { + if (other + == com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (!other.getTaskId().isEmpty()) { + taskId_ = other.taskId_; + onChanged(); + } + if (!other.getType().isEmpty()) { + type_ = other.type_; + onChanged(); + } + if (other.state_ != 0) { + setStateValue(other.getStateValue()); + } + if (other.hasProcessingError()) { + mergeProcessingError(other.getProcessingError()); + } + if (resourceErrorDetailsBuilder_ == null) { + if (!other.resourceErrorDetails_.isEmpty()) { + if (resourceErrorDetails_.isEmpty()) { + resourceErrorDetails_ = other.resourceErrorDetails_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResourceErrorDetailsIsMutable(); + resourceErrorDetails_.addAll(other.resourceErrorDetails_); + } + onChanged(); + } + } else { + if (!other.resourceErrorDetails_.isEmpty()) { + if (resourceErrorDetailsBuilder_.isEmpty()) { + resourceErrorDetailsBuilder_.dispose(); + resourceErrorDetailsBuilder_ = null; + resourceErrorDetails_ = other.resourceErrorDetails_; + bitField0_ = (bitField0_ & ~0x00000001); + resourceErrorDetailsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? 
getResourceErrorDetailsFieldBuilder() + : null; + } else { + resourceErrorDetailsBuilder_.addAllMessages(other.resourceErrorDetails_); + } + } + } + if (other.getResourceErrorCount() != 0) { + setResourceErrorCount(other.getResourceErrorCount()); + } + if (other.hasCreateTime()) { + mergeCreateTime(other.getCreateTime()); + } + if (other.hasLastUpdateTime()) { + mergeLastUpdateTime(other.getLastUpdateTime()); + } + if (metricsBuilder_ == null) { + if (!other.metrics_.isEmpty()) { + if (metrics_.isEmpty()) { + metrics_ = other.metrics_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureMetricsIsMutable(); + metrics_.addAll(other.metrics_); + } + onChanged(); + } + } else { + if (!other.metrics_.isEmpty()) { + if (metricsBuilder_.isEmpty()) { + metricsBuilder_.dispose(); + metricsBuilder_ = null; + metrics_ = other.metrics_; + bitField0_ = (bitField0_ & ~0x00000002); + metricsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? getMetricsFieldBuilder() + : null; + } else { + metricsBuilder_.addAllMessages(other.metrics_); + } + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Output only. Immutable. The resource name for the migration subtask. The ID is
+     * server-generated.
+     * Example: `projects/123/locations/us/workflows/345/subtasks/678`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Output only. Immutable. The resource name for the migration subtask. The ID is
+     * server-generated.
+     * Example: `projects/123/locations/us/workflows/345/subtasks/678`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Output only. Immutable. The resource name for the migration subtask. The ID is
+     * server-generated.
+     * Example: `projects/123/locations/us/workflows/345/subtasks/678`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. Immutable. The resource name for the migration subtask. The ID is
+     * server-generated.
+     * Example: `projects/123/locations/us/workflows/345/subtasks/678`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. Immutable. The resource name for the migration subtask. The ID is
+     * server-generated.
+     * Example: `projects/123/locations/us/workflows/345/subtasks/678`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private java.lang.Object taskId_ = ""; + /** + * + * + *
+     * The unique ID of the task to which this subtask belongs.
+     * 
+ * + * string task_id = 2; + * + * @return The taskId. + */ + public java.lang.String getTaskId() { + java.lang.Object ref = taskId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + taskId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * The unique ID of the task to which this subtask belongs.
+     * 
+ * + * string task_id = 2; + * + * @return The bytes for taskId. + */ + public com.google.protobuf.ByteString getTaskIdBytes() { + java.lang.Object ref = taskId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + taskId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * The unique ID of the task to which this subtask belongs.
+     * 
+ * + * string task_id = 2; + * + * @param value The taskId to set. + * @return This builder for chaining. + */ + public Builder setTaskId(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + taskId_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * The unique ID of the task to which this subtask belongs.
+     * 
+ * + * string task_id = 2; + * + * @return This builder for chaining. + */ + public Builder clearTaskId() { + + taskId_ = getDefaultInstance().getTaskId(); + onChanged(); + return this; + } + /** + * + * + *
+     * The unique ID of the task to which this subtask belongs.
+     * 
+ * + * string task_id = 2; + * + * @param value The bytes for taskId to set. + * @return This builder for chaining. + */ + public Builder setTaskIdBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + taskId_ = value; + onChanged(); + return this; + } + + private java.lang.Object type_ = ""; + /** + * + * + *
+     * The type of the Subtask. The migration service does not check whether this
+     * is a known type. It is up to the task creator (i.e. orchestrator or worker)
+     * to ensure it only creates subtasks for which there are compatible workers
+     * polling for Subtasks.
+     * 
+ * + * string type = 3; + * + * @return The type. + */ + public java.lang.String getType() { + java.lang.Object ref = type_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + type_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * The type of the Subtask. The migration service does not check whether this
+     * is a known type. It is up to the task creator (i.e. orchestrator or worker)
+     * to ensure it only creates subtasks for which there are compatible workers
+     * polling for Subtasks.
+     * 
+ * + * string type = 3; + * + * @return The bytes for type. + */ + public com.google.protobuf.ByteString getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * The type of the Subtask. The migration service does not check whether this
+     * is a known type. It is up to the task creator (i.e. orchestrator or worker)
+     * to ensure it only creates subtasks for which there are compatible workers
+     * polling for Subtasks.
+     * 
+ * + * string type = 3; + * + * @param value The type to set. + * @return This builder for chaining. + */ + public Builder setType(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + type_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * The type of the Subtask. The migration service does not check whether this
+     * is a known type. It is up to the task creator (i.e. orchestrator or worker)
+     * to ensure it only creates subtasks for which there are compatible workers
+     * polling for Subtasks.
+     * 
+ * + * string type = 3; + * + * @return This builder for chaining. + */ + public Builder clearType() { + + type_ = getDefaultInstance().getType(); + onChanged(); + return this; + } + /** + * + * + *
+     * The type of the Subtask. The migration service does not check whether this
+     * is a known type. It is up to the task creator (i.e. orchestrator or worker)
+     * to ensure it only creates subtasks for which there are compatible workers
+     * polling for Subtasks.
+     * 
+ * + * string type = 3; + * + * @param value The bytes for type to set. + * @return This builder for chaining. + */ + public Builder setTypeBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + type_ = value; + onChanged(); + return this; + } + + private int state_ = 0; + /** + * + * + *
+     * Output only. The current state of the subtask.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The enum numeric value on the wire for state. + */ + @java.lang.Override + public int getStateValue() { + return state_; + } + /** + * + * + *
+     * Output only. The current state of the subtask.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @param value The enum numeric value on the wire for state to set. + * @return This builder for chaining. + */ + public Builder setStateValue(int value) { + + state_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The current state of the subtask.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The state. + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State getState() { + @SuppressWarnings("deprecation") + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State result = + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State.valueOf(state_); + return result == null + ? com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State.UNRECOGNIZED + : result; + } + /** + * + * + *
+     * Output only. The current state of the subtask.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @param value The state to set. + * @return This builder for chaining. + */ + public Builder setState( + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State value) { + if (value == null) { + throw new NullPointerException(); + } + + state_ = value.getNumber(); + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The current state of the subtask.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return This builder for chaining. + */ + public Builder clearState() { + + state_ = 0; + onChanged(); + return this; + } + + private com.google.rpc.ErrorInfo processingError_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.ErrorInfo, + com.google.rpc.ErrorInfo.Builder, + com.google.rpc.ErrorInfoOrBuilder> + processingErrorBuilder_; + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the processingError field is set. + */ + public boolean hasProcessingError() { + return processingErrorBuilder_ != null || processingError_ != null; + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The processingError. + */ + public com.google.rpc.ErrorInfo getProcessingError() { + if (processingErrorBuilder_ == null) { + return processingError_ == null + ? com.google.rpc.ErrorInfo.getDefaultInstance() + : processingError_; + } else { + return processingErrorBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setProcessingError(com.google.rpc.ErrorInfo value) { + if (processingErrorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + processingError_ = value; + onChanged(); + } else { + processingErrorBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setProcessingError(com.google.rpc.ErrorInfo.Builder builderForValue) { + if (processingErrorBuilder_ == null) { + processingError_ = builderForValue.build(); + onChanged(); + } else { + processingErrorBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeProcessingError(com.google.rpc.ErrorInfo value) { + if (processingErrorBuilder_ == null) { + if (processingError_ != null) { + processingError_ = + com.google.rpc.ErrorInfo.newBuilder(processingError_).mergeFrom(value).buildPartial(); + } else { + processingError_ = value; + } + onChanged(); + } else { + processingErrorBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearProcessingError() { + if (processingErrorBuilder_ == null) { + processingError_ = null; + onChanged(); + } else { + processingError_ = null; + processingErrorBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.rpc.ErrorInfo.Builder getProcessingErrorBuilder() { + + onChanged(); + return getProcessingErrorFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.rpc.ErrorInfoOrBuilder getProcessingErrorOrBuilder() { + if (processingErrorBuilder_ != null) { + return processingErrorBuilder_.getMessageOrBuilder(); + } else { + return processingError_ == null + ? com.google.rpc.ErrorInfo.getDefaultInstance() + : processingError_; + } + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.ErrorInfo, + com.google.rpc.ErrorInfo.Builder, + com.google.rpc.ErrorInfoOrBuilder> + getProcessingErrorFieldBuilder() { + if (processingErrorBuilder_ == null) { + processingErrorBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.ErrorInfo, + com.google.rpc.ErrorInfo.Builder, + com.google.rpc.ErrorInfoOrBuilder>( + getProcessingError(), getParentForChildren(), isClean()); + processingError_ = null; + } + return processingErrorBuilder_; + } + + private java.util.List + resourceErrorDetails_ = java.util.Collections.emptyList(); + + private void ensureResourceErrorDetailsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + resourceErrorDetails_ = + new java.util.ArrayList< + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail>( + resourceErrorDetails_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail, + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.Builder, + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetailOrBuilder> + resourceErrorDetailsBuilder_; + + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public java.util.List + getResourceErrorDetailsList() { + if (resourceErrorDetailsBuilder_ == null) { + return java.util.Collections.unmodifiableList(resourceErrorDetails_); + } else { + return resourceErrorDetailsBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public int getResourceErrorDetailsCount() { + if (resourceErrorDetailsBuilder_ == null) { + return resourceErrorDetails_.size(); + } else { + return resourceErrorDetailsBuilder_.getCount(); + } + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail getResourceErrorDetails( + int index) { + if (resourceErrorDetailsBuilder_ == null) { + return resourceErrorDetails_.get(index); + } else { + return resourceErrorDetailsBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setResourceErrorDetails( + int index, com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail value) { + if (resourceErrorDetailsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResourceErrorDetailsIsMutable(); + resourceErrorDetails_.set(index, value); + onChanged(); + } else { + resourceErrorDetailsBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setResourceErrorDetails( + int index, + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.Builder builderForValue) { + if (resourceErrorDetailsBuilder_ == null) { + ensureResourceErrorDetailsIsMutable(); + resourceErrorDetails_.set(index, builderForValue.build()); + onChanged(); + } else { + resourceErrorDetailsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder addResourceErrorDetails( + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail value) { + if (resourceErrorDetailsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResourceErrorDetailsIsMutable(); + resourceErrorDetails_.add(value); + onChanged(); + } else { + resourceErrorDetailsBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder addResourceErrorDetails( + int index, com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail value) { + if (resourceErrorDetailsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResourceErrorDetailsIsMutable(); + resourceErrorDetails_.add(index, value); + onChanged(); + } else { + resourceErrorDetailsBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder addResourceErrorDetails( + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.Builder builderForValue) { + if (resourceErrorDetailsBuilder_ == null) { + ensureResourceErrorDetailsIsMutable(); + resourceErrorDetails_.add(builderForValue.build()); + onChanged(); + } else { + resourceErrorDetailsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder addResourceErrorDetails( + int index, + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.Builder builderForValue) { + if (resourceErrorDetailsBuilder_ == null) { + ensureResourceErrorDetailsIsMutable(); + resourceErrorDetails_.add(index, builderForValue.build()); + onChanged(); + } else { + resourceErrorDetailsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder addAllResourceErrorDetails( + java.lang.Iterable< + ? extends com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail> + values) { + if (resourceErrorDetailsBuilder_ == null) { + ensureResourceErrorDetailsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, resourceErrorDetails_); + onChanged(); + } else { + resourceErrorDetailsBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearResourceErrorDetails() { + if (resourceErrorDetailsBuilder_ == null) { + resourceErrorDetails_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + resourceErrorDetailsBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder removeResourceErrorDetails(int index) { + if (resourceErrorDetailsBuilder_ == null) { + ensureResourceErrorDetailsIsMutable(); + resourceErrorDetails_.remove(index); + onChanged(); + } else { + resourceErrorDetailsBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.Builder + getResourceErrorDetailsBuilder(int index) { + return getResourceErrorDetailsFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetailOrBuilder + getResourceErrorDetailsOrBuilder(int index) { + if (resourceErrorDetailsBuilder_ == null) { + return resourceErrorDetails_.get(index); + } else { + return resourceErrorDetailsBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public java.util.List< + ? extends com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetailOrBuilder> + getResourceErrorDetailsOrBuilderList() { + if (resourceErrorDetailsBuilder_ != null) { + return resourceErrorDetailsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(resourceErrorDetails_); + } + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.Builder + addResourceErrorDetailsBuilder() { + return getResourceErrorDetailsFieldBuilder() + .addBuilder( + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.getDefaultInstance()); + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.Builder + addResourceErrorDetailsBuilder(int index) { + return getResourceErrorDetailsFieldBuilder() + .addBuilder( + index, + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.getDefaultInstance()); + } + /** + * + * + *
+     * Output only. Provides details to errors and issues encountered while processing the
+     * subtask. Presence of error details does not mean that the subtask failed.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public java.util.List + getResourceErrorDetailsBuilderList() { + return getResourceErrorDetailsFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail, + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.Builder, + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetailOrBuilder> + getResourceErrorDetailsFieldBuilder() { + if (resourceErrorDetailsBuilder_ == null) { + resourceErrorDetailsBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail, + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.Builder, + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetailOrBuilder>( + resourceErrorDetails_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + resourceErrorDetails_ = null; + } + return resourceErrorDetailsBuilder_; + } + + private int resourceErrorCount_; + /** + * + * + *
+     * The number or resources with errors. Note: This is not the total
+     * number of errors as each resource can have more than one error.
+     * This is used to indicate truncation by having a `resource_error_count`
+     * that is higher than the size of `resource_error_details`.
+     * 
+ * + * int32 resource_error_count = 13; + * + * @return The resourceErrorCount. + */ + @java.lang.Override + public int getResourceErrorCount() { + return resourceErrorCount_; + } + /** + * + * + *
+     * The number or resources with errors. Note: This is not the total
+     * number of errors as each resource can have more than one error.
+     * This is used to indicate truncation by having a `resource_error_count`
+     * that is higher than the size of `resource_error_details`.
+     * 
+ * + * int32 resource_error_count = 13; + * + * @param value The resourceErrorCount to set. + * @return This builder for chaining. + */ + public Builder setResourceErrorCount(int value) { + + resourceErrorCount_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * The number or resources with errors. Note: This is not the total
+     * number of errors as each resource can have more than one error.
+     * This is used to indicate truncation by having a `resource_error_count`
+     * that is higher than the size of `resource_error_details`.
+     * 
+ * + * int32 resource_error_count = 13; + * + * @return This builder for chaining. + */ + public Builder clearResourceErrorCount() { + + resourceErrorCount_ = 0; + onChanged(); + return this; + } + + private com.google.protobuf.Timestamp createTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + createTimeBuilder_; + /** + * + * + *
+     * Time when the subtask was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 7; + * + * @return Whether the createTime field is set. + */ + public boolean hasCreateTime() { + return createTimeBuilder_ != null || createTime_ != null; + } + /** + * + * + *
+     * Time when the subtask was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 7; + * + * @return The createTime. + */ + public com.google.protobuf.Timestamp getCreateTime() { + if (createTimeBuilder_ == null) { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } else { + return createTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Time when the subtask was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 7; + */ + public Builder setCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + createTime_ = value; + onChanged(); + } else { + createTimeBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Time when the subtask was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 7; + */ + public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (createTimeBuilder_ == null) { + createTime_ = builderForValue.build(); + onChanged(); + } else { + createTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Time when the subtask was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 7; + */ + public Builder mergeCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (createTime_ != null) { + createTime_ = + com.google.protobuf.Timestamp.newBuilder(createTime_).mergeFrom(value).buildPartial(); + } else { + createTime_ = value; + } + onChanged(); + } else { + createTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Time when the subtask was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 7; + */ + public Builder clearCreateTime() { + if (createTimeBuilder_ == null) { + createTime_ = null; + onChanged(); + } else { + createTime_ = null; + createTimeBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Time when the subtask was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 7; + */ + public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() { + + onChanged(); + return getCreateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Time when the subtask was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 7; + */ + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + if (createTimeBuilder_ != null) { + return createTimeBuilder_.getMessageOrBuilder(); + } else { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } + } + /** + * + * + *
+     * Time when the subtask was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 7; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getCreateTimeFieldBuilder() { + if (createTimeBuilder_ == null) { + createTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getCreateTime(), getParentForChildren(), isClean()); + createTime_ = null; + } + return createTimeBuilder_; + } + + private com.google.protobuf.Timestamp lastUpdateTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + lastUpdateTimeBuilder_; + /** + * + * + *
+     * Time when the subtask was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + * + * @return Whether the lastUpdateTime field is set. + */ + public boolean hasLastUpdateTime() { + return lastUpdateTimeBuilder_ != null || lastUpdateTime_ != null; + } + /** + * + * + *
+     * Time when the subtask was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + * + * @return The lastUpdateTime. + */ + public com.google.protobuf.Timestamp getLastUpdateTime() { + if (lastUpdateTimeBuilder_ == null) { + return lastUpdateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : lastUpdateTime_; + } else { + return lastUpdateTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Time when the subtask was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + */ + public Builder setLastUpdateTime(com.google.protobuf.Timestamp value) { + if (lastUpdateTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + lastUpdateTime_ = value; + onChanged(); + } else { + lastUpdateTimeBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Time when the subtask was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + */ + public Builder setLastUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (lastUpdateTimeBuilder_ == null) { + lastUpdateTime_ = builderForValue.build(); + onChanged(); + } else { + lastUpdateTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Time when the subtask was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + */ + public Builder mergeLastUpdateTime(com.google.protobuf.Timestamp value) { + if (lastUpdateTimeBuilder_ == null) { + if (lastUpdateTime_ != null) { + lastUpdateTime_ = + com.google.protobuf.Timestamp.newBuilder(lastUpdateTime_) + .mergeFrom(value) + .buildPartial(); + } else { + lastUpdateTime_ = value; + } + onChanged(); + } else { + lastUpdateTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Time when the subtask was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + */ + public Builder clearLastUpdateTime() { + if (lastUpdateTimeBuilder_ == null) { + lastUpdateTime_ = null; + onChanged(); + } else { + lastUpdateTime_ = null; + lastUpdateTimeBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Time when the subtask was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + */ + public com.google.protobuf.Timestamp.Builder getLastUpdateTimeBuilder() { + + onChanged(); + return getLastUpdateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Time when the subtask was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + */ + public com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder() { + if (lastUpdateTimeBuilder_ != null) { + return lastUpdateTimeBuilder_.getMessageOrBuilder(); + } else { + return lastUpdateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : lastUpdateTime_; + } + } + /** + * + * + *
+     * Time when the subtask was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getLastUpdateTimeFieldBuilder() { + if (lastUpdateTimeBuilder_ == null) { + lastUpdateTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getLastUpdateTime(), getParentForChildren(), isClean()); + lastUpdateTime_ = null; + } + return lastUpdateTimeBuilder_; + } + + private java.util.List metrics_ = + java.util.Collections.emptyList(); + + private void ensureMetricsIsMutable() { + if (!((bitField0_ & 0x00000002) != 0)) { + metrics_ = + new java.util.ArrayList( + metrics_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.TimeSeries, + com.google.cloud.bigquery.migration.v2alpha.TimeSeries.Builder, + com.google.cloud.bigquery.migration.v2alpha.TimeSeriesOrBuilder> + metricsBuilder_; + + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public java.util.List getMetricsList() { + if (metricsBuilder_ == null) { + return java.util.Collections.unmodifiableList(metrics_); + } else { + return metricsBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public int getMetricsCount() { + if (metricsBuilder_ == null) { + return metrics_.size(); + } else { + return metricsBuilder_.getCount(); + } + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public com.google.cloud.bigquery.migration.v2alpha.TimeSeries getMetrics(int index) { + if (metricsBuilder_ == null) { + return metrics_.get(index); + } else { + return metricsBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public Builder setMetrics( + int index, com.google.cloud.bigquery.migration.v2alpha.TimeSeries value) { + if (metricsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMetricsIsMutable(); + metrics_.set(index, value); + onChanged(); + } else { + metricsBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public Builder setMetrics( + int index, com.google.cloud.bigquery.migration.v2alpha.TimeSeries.Builder builderForValue) { + if (metricsBuilder_ == null) { + ensureMetricsIsMutable(); + metrics_.set(index, builderForValue.build()); + onChanged(); + } else { + metricsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public Builder addMetrics(com.google.cloud.bigquery.migration.v2alpha.TimeSeries value) { + if (metricsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMetricsIsMutable(); + metrics_.add(value); + onChanged(); + } else { + metricsBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public Builder addMetrics( + int index, com.google.cloud.bigquery.migration.v2alpha.TimeSeries value) { + if (metricsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMetricsIsMutable(); + metrics_.add(index, value); + onChanged(); + } else { + metricsBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public Builder addMetrics( + com.google.cloud.bigquery.migration.v2alpha.TimeSeries.Builder builderForValue) { + if (metricsBuilder_ == null) { + ensureMetricsIsMutable(); + metrics_.add(builderForValue.build()); + onChanged(); + } else { + metricsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public Builder addMetrics( + int index, com.google.cloud.bigquery.migration.v2alpha.TimeSeries.Builder builderForValue) { + if (metricsBuilder_ == null) { + ensureMetricsIsMutable(); + metrics_.add(index, builderForValue.build()); + onChanged(); + } else { + metricsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public Builder addAllMetrics( + java.lang.Iterable + values) { + if (metricsBuilder_ == null) { + ensureMetricsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, metrics_); + onChanged(); + } else { + metricsBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public Builder clearMetrics() { + if (metricsBuilder_ == null) { + metrics_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + metricsBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public Builder removeMetrics(int index) { + if (metricsBuilder_ == null) { + ensureMetricsIsMutable(); + metrics_.remove(index); + onChanged(); + } else { + metricsBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public com.google.cloud.bigquery.migration.v2alpha.TimeSeries.Builder getMetricsBuilder( + int index) { + return getMetricsFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public com.google.cloud.bigquery.migration.v2alpha.TimeSeriesOrBuilder getMetricsOrBuilder( + int index) { + if (metricsBuilder_ == null) { + return metrics_.get(index); + } else { + return metricsBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public java.util.List + getMetricsOrBuilderList() { + if (metricsBuilder_ != null) { + return metricsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(metrics_); + } + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public com.google.cloud.bigquery.migration.v2alpha.TimeSeries.Builder addMetricsBuilder() { + return getMetricsFieldBuilder() + .addBuilder(com.google.cloud.bigquery.migration.v2alpha.TimeSeries.getDefaultInstance()); + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public com.google.cloud.bigquery.migration.v2alpha.TimeSeries.Builder addMetricsBuilder( + int index) { + return getMetricsFieldBuilder() + .addBuilder( + index, com.google.cloud.bigquery.migration.v2alpha.TimeSeries.getDefaultInstance()); + } + /** + * + * + *
+     * The metrics for the subtask.
+     * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + public java.util.List + getMetricsBuilderList() { + return getMetricsFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.TimeSeries, + com.google.cloud.bigquery.migration.v2alpha.TimeSeries.Builder, + com.google.cloud.bigquery.migration.v2alpha.TimeSeriesOrBuilder> + getMetricsFieldBuilder() { + if (metricsBuilder_ == null) { + metricsBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.TimeSeries, + com.google.cloud.bigquery.migration.v2alpha.TimeSeries.Builder, + com.google.cloud.bigquery.migration.v2alpha.TimeSeriesOrBuilder>( + metrics_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); + metrics_ = null; + } + return metricsBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.MigrationSubtask) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.MigrationSubtask) + private static final com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public MigrationSubtask parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MigrationSubtask(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationSubtaskName.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationSubtaskName.java new file mode 100644 index 0000000..bf5eaad --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationSubtaskName.java @@ -0,0 +1,260 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.migration.v2alpha; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") +public class MigrationSubtaskName implements ResourceName { + private static final PathTemplate PROJECT_LOCATION_WORKFLOW_SUBTASK = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}"); + private volatile Map fieldValuesMap; + private final String project; + private final String location; + private final String workflow; + private final String subtask; + + @Deprecated + protected MigrationSubtaskName() { + project = null; + location = null; + workflow = null; + subtask = null; + } + + private MigrationSubtaskName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + workflow = Preconditions.checkNotNull(builder.getWorkflow()); + subtask = Preconditions.checkNotNull(builder.getSubtask()); + } + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getWorkflow() { + return workflow; + } + + public String getSubtask() { + return subtask; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + public static MigrationSubtaskName of( + String project, String location, String workflow, String subtask) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setWorkflow(workflow) + .setSubtask(subtask) + .build(); + } + + public static String format(String 
project, String location, String workflow, String subtask) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setWorkflow(workflow) + .setSubtask(subtask) + .build() + .toString(); + } + + public static MigrationSubtaskName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PROJECT_LOCATION_WORKFLOW_SUBTASK.validatedMatch( + formattedString, "MigrationSubtaskName.parse: formattedString not in valid format"); + return of( + matchMap.get("project"), + matchMap.get("location"), + matchMap.get("workflow"), + matchMap.get("subtask")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList<>(values.size()); + for (MigrationSubtaskName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PROJECT_LOCATION_WORKFLOW_SUBTASK.matches(formattedString); + } + + @Override + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + fieldMapBuilder.put("location", location); + } + if (workflow != null) { + fieldMapBuilder.put("workflow", workflow); + } + if (subtask != null) { + fieldMapBuilder.put("subtask", subtask); + } + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return 
PROJECT_LOCATION_WORKFLOW_SUBTASK.instantiate( + "project", project, "location", location, "workflow", workflow, "subtask", subtask); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + // Fixed generated-code bug: must be &&, not || — with ||, equals(null) threw + // NullPointerException (o.getClass() on null) and equals(objectOfOtherType) threw + // ClassCastException at the cast below, violating the Object.equals contract + // (non-nullity: x.equals(null) must return false). + if (o != null && getClass() == o.getClass()) { + MigrationSubtaskName that = ((MigrationSubtaskName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.workflow, that.workflow) + && Objects.equals(this.subtask, that.subtask); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(workflow); + h *= 1000003; + h ^= Objects.hashCode(subtask); + return h; + } + + /** + * Builder for projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}. + */ + public static class Builder { + private String project; + private String location; + private String workflow; + private String subtask; + + protected Builder() {} + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getWorkflow() { + return workflow; + } + + public String getSubtask() { + return subtask; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setWorkflow(String workflow) { + this.workflow = workflow; + return this; + } + + public Builder setSubtask(String subtask) { + this.subtask = subtask; + return this; + } + + private Builder(MigrationSubtaskName migrationSubtaskName) { + this.project = migrationSubtaskName.project; + this.location = migrationSubtaskName.location; + this.workflow = migrationSubtaskName.workflow; + this.subtask = migrationSubtaskName.subtask; + } + + public 
MigrationSubtaskName build() { + return new MigrationSubtaskName(this); + } + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationSubtaskOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationSubtaskOrBuilder.java new file mode 100644 index 0000000..ba4f4a1 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationSubtaskOrBuilder.java @@ -0,0 +1,389 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_entities.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface MigrationSubtaskOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.MigrationSubtask) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Output only. Immutable. The resource name for the migration subtask. The ID is
+   * server-generated.
+   * Example: `projects/123/locations/us/workflows/345/subtasks/678`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Output only. Immutable. The resource name for the migration subtask. The ID is
+   * server-generated.
+   * Example: `projects/123/locations/us/workflows/345/subtasks/678`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); + + /** + * + * + *
+   * The unique ID of the task to which this subtask belongs.
+   * 
+ * + * string task_id = 2; + * + * @return The taskId. + */ + java.lang.String getTaskId(); + /** + * + * + *
+   * The unique ID of the task to which this subtask belongs.
+   * 
+ * + * string task_id = 2; + * + * @return The bytes for taskId. + */ + com.google.protobuf.ByteString getTaskIdBytes(); + + /** + * + * + *
+   * The type of the Subtask. The migration service does not check whether this
+   * is a known type. It is up to the task creator (i.e. orchestrator or worker)
+   * to ensure it only creates subtasks for which there are compatible workers
+   * polling for Subtasks.
+   * 
+ * + * string type = 3; + * + * @return The type. + */ + java.lang.String getType(); + /** + * + * + *
+   * The type of the Subtask. The migration service does not check whether this
+   * is a known type. It is up to the task creator (i.e. orchestrator or worker)
+   * to ensure it only creates subtasks for which there are compatible workers
+   * polling for Subtasks.
+   * 
+ * + * string type = 3; + * + * @return The bytes for type. + */ + com.google.protobuf.ByteString getTypeBytes(); + + /** + * + * + *
+   * Output only. The current state of the subtask.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The enum numeric value on the wire for state. + */ + int getStateValue(); + /** + * + * + *
+   * Output only. The current state of the subtask.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The state. + */ + com.google.cloud.bigquery.migration.v2alpha.MigrationSubtask.State getState(); + + /** + * + * + *
+   * Output only. An explanation that may be populated when the task is in FAILED state.
+   * 
+ * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the processingError field is set. + */ + boolean hasProcessingError(); + /** + * + * + *
+   * Output only. An explanation that may be populated when the task is in FAILED state.
+   * 
+ * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The processingError. + */ + com.google.rpc.ErrorInfo getProcessingError(); + /** + * + * + *
+   * Output only. An explanation that may be populated when the task is in FAILED state.
+   * 
+ * + * .google.rpc.ErrorInfo processing_error = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.rpc.ErrorInfoOrBuilder getProcessingErrorOrBuilder(); + + /** + * + * + *
+   * Output only. Provides details to errors and issues encountered while processing the
+   * subtask. Presence of error details does not mean that the subtask failed.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + java.util.List + getResourceErrorDetailsList(); + /** + * + * + *
+   * Output only. Provides details to errors and issues encountered while processing the
+   * subtask. Presence of error details does not mean that the subtask failed.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail getResourceErrorDetails( + int index); + /** + * + * + *
+   * Output only. Provides details to errors and issues encountered while processing the
+   * subtask. Presence of error details does not mean that the subtask failed.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + int getResourceErrorDetailsCount(); + /** + * + * + *
+   * Output only. Provides details to errors and issues encountered while processing the
+   * subtask. Presence of error details does not mean that the subtask failed.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + java.util.List + getResourceErrorDetailsOrBuilderList(); + /** + * + * + *
+   * Output only. Provides details to errors and issues encountered while processing the
+   * subtask. Presence of error details does not mean that the subtask failed.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail resource_error_details = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetailOrBuilder + getResourceErrorDetailsOrBuilder(int index); + + /** + * + * + *
+   * The number or resources with errors. Note: This is not the total
+   * number of errors as each resource can have more than one error.
+   * This is used to indicate truncation by having a `resource_error_count`
+   * that is higher than the size of `resource_error_details`.
+   * 
+ * + * int32 resource_error_count = 13; + * + * @return The resourceErrorCount. + */ + int getResourceErrorCount(); + + /** + * + * + *
+   * Time when the subtask was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 7; + * + * @return Whether the createTime field is set. + */ + boolean hasCreateTime(); + /** + * + * + *
+   * Time when the subtask was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 7; + * + * @return The createTime. + */ + com.google.protobuf.Timestamp getCreateTime(); + /** + * + * + *
+   * Time when the subtask was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 7; + */ + com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder(); + + /** + * + * + *
+   * Time when the subtask was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + * + * @return Whether the lastUpdateTime field is set. + */ + boolean hasLastUpdateTime(); + /** + * + * + *
+   * Time when the subtask was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + * + * @return The lastUpdateTime. + */ + com.google.protobuf.Timestamp getLastUpdateTime(); + /** + * + * + *
+   * Time when the subtask was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 8; + */ + com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder(); + + /** + * + * + *
+   * The metrics for the subtask.
+   * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + java.util.List getMetricsList(); + /** + * + * + *
+   * The metrics for the subtask.
+   * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + com.google.cloud.bigquery.migration.v2alpha.TimeSeries getMetrics(int index); + /** + * + * + *
+   * The metrics for the subtask.
+   * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + int getMetricsCount(); + /** + * + * + *
+   * The metrics for the subtask.
+   * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + java.util.List + getMetricsOrBuilderList(); + /** + * + * + *
+   * The metrics for the subtask.
+   * 
+ * + * repeated .google.cloud.bigquery.migration.v2alpha.TimeSeries metrics = 11; + */ + com.google.cloud.bigquery.migration.v2alpha.TimeSeriesOrBuilder getMetricsOrBuilder(int index); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationTask.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationTask.java new file mode 100644 index 0000000..6166685 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationTask.java @@ -0,0 +1,2373 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_entities.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * A single task for a migration which has details about the configuration of
+ * the task.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.MigrationTask} + */ +public final class MigrationTask extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.MigrationTask) + MigrationTaskOrBuilder { + private static final long serialVersionUID = 0L; + // Use MigrationTask.newBuilder() to construct. + private MigrationTask(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private MigrationTask() { + id_ = ""; + type_ = ""; + state_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new MigrationTask(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private MigrationTask( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + id_ = s; + break; + } + case 18: + { + java.lang.String s = input.readStringRequireUtf8(); + + type_ = s; + break; + } + case 26: + { + com.google.protobuf.Any.Builder subBuilder = null; + if (details_ != null) { + subBuilder = details_.toBuilder(); + } + details_ = input.readMessage(com.google.protobuf.Any.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(details_); + details_ = subBuilder.buildPartial(); + } + + break; + } + case 32: + { + int rawValue = 
input.readEnum(); + + state_ = rawValue; + break; + } + case 42: + { + com.google.rpc.ErrorInfo.Builder subBuilder = null; + if (processingError_ != null) { + subBuilder = processingError_.toBuilder(); + } + processingError_ = + input.readMessage(com.google.rpc.ErrorInfo.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(processingError_); + processingError_ = subBuilder.buildPartial(); + } + + break; + } + case 50: + { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (createTime_ != null) { + subBuilder = createTime_.toBuilder(); + } + createTime_ = + input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(createTime_); + createTime_ = subBuilder.buildPartial(); + } + + break; + } + case 58: + { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (lastUpdateTime_ != null) { + subBuilder = lastUpdateTime_.toBuilder(); + } + lastUpdateTime_ = + input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(lastUpdateTime_); + lastUpdateTime_ = subBuilder.buildPartial(); + } + + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationTask_descriptor; + } + + @java.lang.Override + protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationTask_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.MigrationTask.class, + com.google.cloud.bigquery.migration.v2alpha.MigrationTask.Builder.class); + } + + /** + * + * + *
+   * Possible states of a migration task.
+   * 
+ * + * Protobuf enum {@code google.cloud.bigquery.migration.v2alpha.MigrationTask.State} + */ + public enum State implements com.google.protobuf.ProtocolMessageEnum { + /** + * + * + *
+     * The state is unspecified.
+     * 
+ * + * STATE_UNSPECIFIED = 0; + */ + STATE_UNSPECIFIED(0), + /** + * + * + *
+     * The task is waiting for orchestration.
+     * 
+ * + * PENDING = 1; + */ + PENDING(1), + /** + * + * + *
+     * The task is assigned to an orchestrator.
+     * 
+ * + * ORCHESTRATING = 2; + */ + ORCHESTRATING(2), + /** + * + * + *
+     * The task is running, i.e. its subtasks are ready for execution.
+     * 
+ * + * RUNNING = 3; + */ + RUNNING(3), + /** + * + * + *
+     * Tha task is paused. Assigned subtasks can continue, but no new subtasks
+     * will be scheduled.
+     * 
+ * + * PAUSED = 4; + */ + PAUSED(4), + /** + * + * + *
+     * The task finished successfully.
+     * 
+ * + * SUCCEEDED = 5; + */ + SUCCEEDED(5), + /** + * + * + *
+     * The task finished unsuccessfully.
+     * 
+ * + * FAILED = 6; + */ + FAILED(6), + UNRECOGNIZED(-1), + ; + + /** + * + * + *
+     * The state is unspecified.
+     * 
+ * + * STATE_UNSPECIFIED = 0; + */ + public static final int STATE_UNSPECIFIED_VALUE = 0; + /** + * + * + *
+     * The task is waiting for orchestration.
+     * 
+ * + * PENDING = 1; + */ + public static final int PENDING_VALUE = 1; + /** + * + * + *
+     * The task is assigned to an orchestrator.
+     * 
+ * + * ORCHESTRATING = 2; + */ + public static final int ORCHESTRATING_VALUE = 2; + /** + * + * + *
+     * The task is running, i.e. its subtasks are ready for execution.
+     * 
+ * + * RUNNING = 3; + */ + public static final int RUNNING_VALUE = 3; + /** + * + * + *
+     * Tha task is paused. Assigned subtasks can continue, but no new subtasks
+     * will be scheduled.
+     * 
+ * + * PAUSED = 4; + */ + public static final int PAUSED_VALUE = 4; + /** + * + * + *
+     * The task finished successfully.
+     * 
+ * + * SUCCEEDED = 5; + */ + public static final int SUCCEEDED_VALUE = 5; + /** + * + * + *
+     * The task finished unsuccessfully.
+     * 
+ * + * FAILED = 6; + */ + public static final int FAILED_VALUE = 6; + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static State valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static State forNumber(int value) { + switch (value) { + case 0: + return STATE_UNSPECIFIED; + case 1: + return PENDING; + case 2: + return ORCHESTRATING; + case 3: + return RUNNING; + case 4: + return PAUSED; + case 5: + return SUCCEEDED; + case 6: + return FAILED; + default: + return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { + return internalValueMap; + } + + private static final com.google.protobuf.Internal.EnumLiteMap internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public State findValueByNumber(int number) { + return State.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + + public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { + return getDescriptor(); + } + + public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationTask.getDescriptor() + .getEnumTypes() + .get(0); + } + + private static 
final State[] VALUES = values(); + + public static State valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private State(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:google.cloud.bigquery.migration.v2alpha.MigrationTask.State) + } + + public static final int ID_FIELD_NUMBER = 1; + private volatile java.lang.Object id_; + /** + * + * + *
+   * Output only. Immutable. The unique identifier for the migration task. The ID is server-generated.
+   * 
+ * + * + * string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The id. + */ + @java.lang.Override + public java.lang.String getId() { + java.lang.Object ref = id_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + id_ = s; + return s; + } + } + /** + * + * + *
+   * Output only. Immutable. The unique identifier for the migration task. The ID is server-generated.
+   * 
+ * + * + * string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The bytes for id. + */ + @java.lang.Override + public com.google.protobuf.ByteString getIdBytes() { + java.lang.Object ref = id_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + id_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TYPE_FIELD_NUMBER = 2; + private volatile java.lang.Object type_; + /** + * + * + *
+   * The type of the task. This must be a supported task type.
+   * 
+ * + * string type = 2; + * + * @return The type. + */ + @java.lang.Override + public java.lang.String getType() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + type_ = s; + return s; + } + } + /** + * + * + *
+   * The type of the task. This must be a supported task type.
+   * 
+ * + * string type = 2; + * + * @return The bytes for type. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DETAILS_FIELD_NUMBER = 3; + private com.google.protobuf.Any details_; + /** + * + * + *
+   * The details of the task. The type URL must be one of the supported task
+   * details messages and correspond to the Task's type.
+   * 
+ * + * .google.protobuf.Any details = 3; + * + * @return Whether the details field is set. + */ + @java.lang.Override + public boolean hasDetails() { + return details_ != null; + } + /** + * + * + *
+   * The details of the task. The type URL must be one of the supported task
+   * details messages and correspond to the Task's type.
+   * 
+ * + * .google.protobuf.Any details = 3; + * + * @return The details. + */ + @java.lang.Override + public com.google.protobuf.Any getDetails() { + return details_ == null ? com.google.protobuf.Any.getDefaultInstance() : details_; + } + /** + * + * + *
+   * The details of the task. The type URL must be one of the supported task
+   * details messages and correspond to the Task's type.
+   * 
+ * + * .google.protobuf.Any details = 3; + */ + @java.lang.Override + public com.google.protobuf.AnyOrBuilder getDetailsOrBuilder() { + return getDetails(); + } + + public static final int STATE_FIELD_NUMBER = 4; + private int state_; + /** + * + * + *
+   * Output only. The current state of the task.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationTask.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The enum numeric value on the wire for state. + */ + @java.lang.Override + public int getStateValue() { + return state_; + } + /** + * + * + *
+   * Output only. The current state of the task.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationTask.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The state. + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationTask.State getState() { + @SuppressWarnings("deprecation") + com.google.cloud.bigquery.migration.v2alpha.MigrationTask.State result = + com.google.cloud.bigquery.migration.v2alpha.MigrationTask.State.valueOf(state_); + return result == null + ? com.google.cloud.bigquery.migration.v2alpha.MigrationTask.State.UNRECOGNIZED + : result; + } + + public static final int PROCESSING_ERROR_FIELD_NUMBER = 5; + private com.google.rpc.ErrorInfo processingError_; + /** + * + * + *
+   * Output only. An explanation that may be populated when the task is in FAILED state.
+   * 
+ * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the processingError field is set. + */ + @java.lang.Override + public boolean hasProcessingError() { + return processingError_ != null; + } + /** + * + * + *
+   * Output only. An explanation that may be populated when the task is in FAILED state.
+   * 
+ * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The processingError. + */ + @java.lang.Override + public com.google.rpc.ErrorInfo getProcessingError() { + return processingError_ == null + ? com.google.rpc.ErrorInfo.getDefaultInstance() + : processingError_; + } + /** + * + * + *
+   * Output only. An explanation that may be populated when the task is in FAILED state.
+   * 
+ * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.rpc.ErrorInfoOrBuilder getProcessingErrorOrBuilder() { + return getProcessingError(); + } + + public static final int CREATE_TIME_FIELD_NUMBER = 6; + private com.google.protobuf.Timestamp createTime_; + /** + * + * + *
+   * Time when the task was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 6; + * + * @return Whether the createTime field is set. + */ + @java.lang.Override + public boolean hasCreateTime() { + return createTime_ != null; + } + /** + * + * + *
+   * Time when the task was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 6; + * + * @return The createTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getCreateTime() { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + /** + * + * + *
+   * Time when the task was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 6; + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + return getCreateTime(); + } + + public static final int LAST_UPDATE_TIME_FIELD_NUMBER = 7; + private com.google.protobuf.Timestamp lastUpdateTime_; + /** + * + * + *
+   * Time when the task was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + * + * @return Whether the lastUpdateTime field is set. + */ + @java.lang.Override + public boolean hasLastUpdateTime() { + return lastUpdateTime_ != null; + } + /** + * + * + *
+   * Time when the task was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + * + * @return The lastUpdateTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getLastUpdateTime() { + return lastUpdateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : lastUpdateTime_; + } + /** + * + * + *
+   * Time when the task was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder() { + return getLastUpdateTime(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_); + } + if (!getTypeBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, type_); + } + if (details_ != null) { + output.writeMessage(3, getDetails()); + } + if (state_ + != com.google.cloud.bigquery.migration.v2alpha.MigrationTask.State.STATE_UNSPECIFIED + .getNumber()) { + output.writeEnum(4, state_); + } + if (processingError_ != null) { + output.writeMessage(5, getProcessingError()); + } + if (createTime_ != null) { + output.writeMessage(6, getCreateTime()); + } + if (lastUpdateTime_ != null) { + output.writeMessage(7, getLastUpdateTime()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_); + } + if (!getTypeBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, type_); + } + if (details_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getDetails()); + } + if (state_ + != com.google.cloud.bigquery.migration.v2alpha.MigrationTask.State.STATE_UNSPECIFIED + .getNumber()) { + size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, state_); + 
} + if (processingError_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getProcessingError()); + } + if (createTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(6, getCreateTime()); + } + if (lastUpdateTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(7, getLastUpdateTime()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.MigrationTask)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.MigrationTask other = + (com.google.cloud.bigquery.migration.v2alpha.MigrationTask) obj; + + if (!getId().equals(other.getId())) return false; + if (!getType().equals(other.getType())) return false; + if (hasDetails() != other.hasDetails()) return false; + if (hasDetails()) { + if (!getDetails().equals(other.getDetails())) return false; + } + if (state_ != other.state_) return false; + if (hasProcessingError() != other.hasProcessingError()) return false; + if (hasProcessingError()) { + if (!getProcessingError().equals(other.getProcessingError())) return false; + } + if (hasCreateTime() != other.hasCreateTime()) return false; + if (hasCreateTime()) { + if (!getCreateTime().equals(other.getCreateTime())) return false; + } + if (hasLastUpdateTime() != other.hasLastUpdateTime()) return false; + if (hasLastUpdateTime()) { + if (!getLastUpdateTime().equals(other.getLastUpdateTime())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + ID_FIELD_NUMBER; + hash = (53 * hash) + 
getId().hashCode(); + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + getType().hashCode(); + if (hasDetails()) { + hash = (37 * hash) + DETAILS_FIELD_NUMBER; + hash = (53 * hash) + getDetails().hashCode(); + } + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + state_; + if (hasProcessingError()) { + hash = (37 * hash) + PROCESSING_ERROR_FIELD_NUMBER; + hash = (53 * hash) + getProcessingError().hashCode(); + } + if (hasCreateTime()) { + hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getCreateTime().hashCode(); + } + if (hasLastUpdateTime()) { + hash = (37 * hash) + LAST_UPDATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getLastUpdateTime().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.MigrationTask prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * A single task for a migration which has details about the configuration of
+   * the task.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.MigrationTask} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.MigrationTask) + com.google.cloud.bigquery.migration.v2alpha.MigrationTaskOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationTask_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationTask_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.MigrationTask.class, + com.google.cloud.bigquery.migration.v2alpha.MigrationTask.Builder.class); + } + + // Construct using com.google.cloud.bigquery.migration.v2alpha.MigrationTask.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + id_ = ""; + + type_ = ""; + + if (detailsBuilder_ == null) { + details_ = null; + } else { + details_ = null; + detailsBuilder_ = null; + } + state_ = 0; + + if (processingErrorBuilder_ == null) { + processingError_ = null; + } else { + processingError_ = null; + processingErrorBuilder_ = null; + } + if (createTimeBuilder_ == null) { + createTime_ = null; + } else { + createTime_ = null; + 
createTimeBuilder_ = null; + } + if (lastUpdateTimeBuilder_ == null) { + lastUpdateTime_ = null; + } else { + lastUpdateTime_ = null; + lastUpdateTimeBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationTask_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationTask getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationTask.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationTask build() { + com.google.cloud.bigquery.migration.v2alpha.MigrationTask result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationTask buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.MigrationTask result = + new com.google.cloud.bigquery.migration.v2alpha.MigrationTask(this); + result.id_ = id_; + result.type_ = type_; + if (detailsBuilder_ == null) { + result.details_ = details_; + } else { + result.details_ = detailsBuilder_.build(); + } + result.state_ = state_; + if (processingErrorBuilder_ == null) { + result.processingError_ = processingError_; + } else { + result.processingError_ = processingErrorBuilder_.build(); + } + if (createTimeBuilder_ == null) { + result.createTime_ = createTime_; + } else { + result.createTime_ = createTimeBuilder_.build(); + } + if (lastUpdateTimeBuilder_ == null) { + result.lastUpdateTime_ = lastUpdateTime_; + } else { + result.lastUpdateTime_ = lastUpdateTimeBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + 
@java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.migration.v2alpha.MigrationTask) { + return mergeFrom((com.google.cloud.bigquery.migration.v2alpha.MigrationTask) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.migration.v2alpha.MigrationTask other) { + if (other == com.google.cloud.bigquery.migration.v2alpha.MigrationTask.getDefaultInstance()) + return this; + if (!other.getId().isEmpty()) { + id_ = other.id_; + onChanged(); + } + if (!other.getType().isEmpty()) { + type_ = other.type_; + onChanged(); + } + if (other.hasDetails()) { + mergeDetails(other.getDetails()); + } + if (other.state_ != 0) { + setStateValue(other.getStateValue()); + } + if (other.hasProcessingError()) { + mergeProcessingError(other.getProcessingError()); + } + if (other.hasCreateTime()) { + mergeCreateTime(other.getCreateTime()); + } + if (other.hasLastUpdateTime()) { + mergeLastUpdateTime(other.getLastUpdateTime()); + } + this.mergeUnknownFields(other.unknownFields); + 
onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.MigrationTask parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.MigrationTask) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object id_ = ""; + /** + * + * + *
+     * Output only. Immutable. The unique identifier for the migration task. The ID is server-generated.
+     * 
+ * + * + * string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The id. + */ + public java.lang.String getId() { + java.lang.Object ref = id_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + id_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Output only. Immutable. The unique identifier for the migration task. The ID is server-generated.
+     * 
+ * + * + * string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The bytes for id. + */ + public com.google.protobuf.ByteString getIdBytes() { + java.lang.Object ref = id_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + id_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Output only. Immutable. The unique identifier for the migration task. The ID is server-generated.
+     * 
+ * + * + * string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @param value The id to set. + * @return This builder for chaining. + */ + public Builder setId(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + id_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. Immutable. The unique identifier for the migration task. The ID is server-generated.
+     * 
+ * + * + * string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return This builder for chaining. + */ + public Builder clearId() { + + id_ = getDefaultInstance().getId(); + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. Immutable. The unique identifier for the migration task. The ID is server-generated.
+     * 
+ * + * + * string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @param value The bytes for id to set. + * @return This builder for chaining. + */ + public Builder setIdBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + id_ = value; + onChanged(); + return this; + } + + private java.lang.Object type_ = ""; + /** + * + * + *
+     * The type of the task. This must be a supported task type.
+     * 
+ * + * string type = 2; + * + * @return The type. + */ + public java.lang.String getType() { + java.lang.Object ref = type_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + type_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * The type of the task. This must be a supported task type.
+     * 
+ * + * string type = 2; + * + * @return The bytes for type. + */ + public com.google.protobuf.ByteString getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * The type of the task. This must be a supported task type.
+     * 
+ * + * string type = 2; + * + * @param value The type to set. + * @return This builder for chaining. + */ + public Builder setType(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + type_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * The type of the task. This must be a supported task type.
+     * 
+ * + * string type = 2; + * + * @return This builder for chaining. + */ + public Builder clearType() { + + type_ = getDefaultInstance().getType(); + onChanged(); + return this; + } + /** + * + * + *
+     * The type of the task. This must be a supported task type.
+     * 
+ * + * string type = 2; + * + * @param value The bytes for type to set. + * @return This builder for chaining. + */ + public Builder setTypeBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + type_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.Any details_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Any, + com.google.protobuf.Any.Builder, + com.google.protobuf.AnyOrBuilder> + detailsBuilder_; + /** + * + * + *
+     * The details of the task. The type URL must be one of the supported task
+     * details messages and correspond to the Task's type.
+     * 
+ * + * .google.protobuf.Any details = 3; + * + * @return Whether the details field is set. + */ + public boolean hasDetails() { + return detailsBuilder_ != null || details_ != null; + } + /** + * + * + *
+     * The details of the task. The type URL must be one of the supported task
+     * details messages and correspond to the Task's type.
+     * 
+ * + * .google.protobuf.Any details = 3; + * + * @return The details. + */ + public com.google.protobuf.Any getDetails() { + if (detailsBuilder_ == null) { + return details_ == null ? com.google.protobuf.Any.getDefaultInstance() : details_; + } else { + return detailsBuilder_.getMessage(); + } + } + /** + * + * + *
+     * The details of the task. The type URL must be one of the supported task
+     * details messages and correspond to the Task's type.
+     * 
+ * + * .google.protobuf.Any details = 3; + */ + public Builder setDetails(com.google.protobuf.Any value) { + if (detailsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + details_ = value; + onChanged(); + } else { + detailsBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * The details of the task. The type URL must be one of the supported task
+     * details messages and correspond to the Task's type.
+     * 
+ * + * .google.protobuf.Any details = 3; + */ + public Builder setDetails(com.google.protobuf.Any.Builder builderForValue) { + if (detailsBuilder_ == null) { + details_ = builderForValue.build(); + onChanged(); + } else { + detailsBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * The details of the task. The type URL must be one of the supported task
+     * details messages and correspond to the Task's type.
+     * 
+ * + * .google.protobuf.Any details = 3; + */ + public Builder mergeDetails(com.google.protobuf.Any value) { + if (detailsBuilder_ == null) { + if (details_ != null) { + details_ = com.google.protobuf.Any.newBuilder(details_).mergeFrom(value).buildPartial(); + } else { + details_ = value; + } + onChanged(); + } else { + detailsBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * The details of the task. The type URL must be one of the supported task
+     * details messages and correspond to the Task's type.
+     * 
+ * + * .google.protobuf.Any details = 3; + */ + public Builder clearDetails() { + if (detailsBuilder_ == null) { + details_ = null; + onChanged(); + } else { + details_ = null; + detailsBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * The details of the task. The type URL must be one of the supported task
+     * details messages and correspond to the Task's type.
+     * 
+ * + * .google.protobuf.Any details = 3; + */ + public com.google.protobuf.Any.Builder getDetailsBuilder() { + + onChanged(); + return getDetailsFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * The details of the task. The type URL must be one of the supported task
+     * details messages and correspond to the Task's type.
+     * 
+ * + * .google.protobuf.Any details = 3; + */ + public com.google.protobuf.AnyOrBuilder getDetailsOrBuilder() { + if (detailsBuilder_ != null) { + return detailsBuilder_.getMessageOrBuilder(); + } else { + return details_ == null ? com.google.protobuf.Any.getDefaultInstance() : details_; + } + } + /** + * + * + *
+     * The details of the task. The type URL must be one of the supported task
+     * details messages and correspond to the Task's type.
+     * 
+ * + * .google.protobuf.Any details = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Any, + com.google.protobuf.Any.Builder, + com.google.protobuf.AnyOrBuilder> + getDetailsFieldBuilder() { + if (detailsBuilder_ == null) { + detailsBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Any, + com.google.protobuf.Any.Builder, + com.google.protobuf.AnyOrBuilder>(getDetails(), getParentForChildren(), isClean()); + details_ = null; + } + return detailsBuilder_; + } + + private int state_ = 0; + /** + * + * + *
+     * Output only. The current state of the task.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationTask.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The enum numeric value on the wire for state. + */ + @java.lang.Override + public int getStateValue() { + return state_; + } + /** + * + * + *
+     * Output only. The current state of the task.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationTask.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @param value The enum numeric value on the wire for state to set. + * @return This builder for chaining. + */ + public Builder setStateValue(int value) { + + state_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The current state of the task.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationTask.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The state. + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationTask.State getState() { + @SuppressWarnings("deprecation") + com.google.cloud.bigquery.migration.v2alpha.MigrationTask.State result = + com.google.cloud.bigquery.migration.v2alpha.MigrationTask.State.valueOf(state_); + return result == null + ? com.google.cloud.bigquery.migration.v2alpha.MigrationTask.State.UNRECOGNIZED + : result; + } + /** + * + * + *
+     * Output only. The current state of the task.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationTask.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @param value The state to set. + * @return This builder for chaining. + */ + public Builder setState(com.google.cloud.bigquery.migration.v2alpha.MigrationTask.State value) { + if (value == null) { + throw new NullPointerException(); + } + + state_ = value.getNumber(); + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The current state of the task.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationTask.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return This builder for chaining. + */ + public Builder clearState() { + + state_ = 0; + onChanged(); + return this; + } + + private com.google.rpc.ErrorInfo processingError_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.ErrorInfo, + com.google.rpc.ErrorInfo.Builder, + com.google.rpc.ErrorInfoOrBuilder> + processingErrorBuilder_; + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the processingError field is set. + */ + public boolean hasProcessingError() { + return processingErrorBuilder_ != null || processingError_ != null; + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The processingError. + */ + public com.google.rpc.ErrorInfo getProcessingError() { + if (processingErrorBuilder_ == null) { + return processingError_ == null + ? com.google.rpc.ErrorInfo.getDefaultInstance() + : processingError_; + } else { + return processingErrorBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setProcessingError(com.google.rpc.ErrorInfo value) { + if (processingErrorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + processingError_ = value; + onChanged(); + } else { + processingErrorBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setProcessingError(com.google.rpc.ErrorInfo.Builder builderForValue) { + if (processingErrorBuilder_ == null) { + processingError_ = builderForValue.build(); + onChanged(); + } else { + processingErrorBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeProcessingError(com.google.rpc.ErrorInfo value) { + if (processingErrorBuilder_ == null) { + if (processingError_ != null) { + processingError_ = + com.google.rpc.ErrorInfo.newBuilder(processingError_).mergeFrom(value).buildPartial(); + } else { + processingError_ = value; + } + onChanged(); + } else { + processingErrorBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearProcessingError() { + if (processingErrorBuilder_ == null) { + processingError_ = null; + onChanged(); + } else { + processingError_ = null; + processingErrorBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.rpc.ErrorInfo.Builder getProcessingErrorBuilder() { + + onChanged(); + return getProcessingErrorFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.rpc.ErrorInfoOrBuilder getProcessingErrorOrBuilder() { + if (processingErrorBuilder_ != null) { + return processingErrorBuilder_.getMessageOrBuilder(); + } else { + return processingError_ == null + ? com.google.rpc.ErrorInfo.getDefaultInstance() + : processingError_; + } + } + /** + * + * + *
+     * Output only. An explanation that may be populated when the task is in FAILED state.
+     * 
+ * + * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.ErrorInfo, + com.google.rpc.ErrorInfo.Builder, + com.google.rpc.ErrorInfoOrBuilder> + getProcessingErrorFieldBuilder() { + if (processingErrorBuilder_ == null) { + processingErrorBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.ErrorInfo, + com.google.rpc.ErrorInfo.Builder, + com.google.rpc.ErrorInfoOrBuilder>( + getProcessingError(), getParentForChildren(), isClean()); + processingError_ = null; + } + return processingErrorBuilder_; + } + + private com.google.protobuf.Timestamp createTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + createTimeBuilder_; + /** + * + * + *
+     * Time when the task was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 6; + * + * @return Whether the createTime field is set. + */ + public boolean hasCreateTime() { + return createTimeBuilder_ != null || createTime_ != null; + } + /** + * + * + *
+     * Time when the task was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 6; + * + * @return The createTime. + */ + public com.google.protobuf.Timestamp getCreateTime() { + if (createTimeBuilder_ == null) { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } else { + return createTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Time when the task was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 6; + */ + public Builder setCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + createTime_ = value; + onChanged(); + } else { + createTimeBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Time when the task was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 6; + */ + public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (createTimeBuilder_ == null) { + createTime_ = builderForValue.build(); + onChanged(); + } else { + createTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Time when the task was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 6; + */ + public Builder mergeCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (createTime_ != null) { + createTime_ = + com.google.protobuf.Timestamp.newBuilder(createTime_).mergeFrom(value).buildPartial(); + } else { + createTime_ = value; + } + onChanged(); + } else { + createTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Time when the task was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 6; + */ + public Builder clearCreateTime() { + if (createTimeBuilder_ == null) { + createTime_ = null; + onChanged(); + } else { + createTime_ = null; + createTimeBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Time when the task was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 6; + */ + public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() { + + onChanged(); + return getCreateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Time when the task was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 6; + */ + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + if (createTimeBuilder_ != null) { + return createTimeBuilder_.getMessageOrBuilder(); + } else { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } + } + /** + * + * + *
+     * Time when the task was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 6; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getCreateTimeFieldBuilder() { + if (createTimeBuilder_ == null) { + createTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getCreateTime(), getParentForChildren(), isClean()); + createTime_ = null; + } + return createTimeBuilder_; + } + + private com.google.protobuf.Timestamp lastUpdateTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + lastUpdateTimeBuilder_; + /** + * + * + *
+     * Time when the task was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + * + * @return Whether the lastUpdateTime field is set. + */ + public boolean hasLastUpdateTime() { + return lastUpdateTimeBuilder_ != null || lastUpdateTime_ != null; + } + /** + * + * + *
+     * Time when the task was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + * + * @return The lastUpdateTime. + */ + public com.google.protobuf.Timestamp getLastUpdateTime() { + if (lastUpdateTimeBuilder_ == null) { + return lastUpdateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : lastUpdateTime_; + } else { + return lastUpdateTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Time when the task was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + */ + public Builder setLastUpdateTime(com.google.protobuf.Timestamp value) { + if (lastUpdateTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + lastUpdateTime_ = value; + onChanged(); + } else { + lastUpdateTimeBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Time when the task was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + */ + public Builder setLastUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (lastUpdateTimeBuilder_ == null) { + lastUpdateTime_ = builderForValue.build(); + onChanged(); + } else { + lastUpdateTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Time when the task was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + */ + public Builder mergeLastUpdateTime(com.google.protobuf.Timestamp value) { + if (lastUpdateTimeBuilder_ == null) { + if (lastUpdateTime_ != null) { + lastUpdateTime_ = + com.google.protobuf.Timestamp.newBuilder(lastUpdateTime_) + .mergeFrom(value) + .buildPartial(); + } else { + lastUpdateTime_ = value; + } + onChanged(); + } else { + lastUpdateTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Time when the task was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + */ + public Builder clearLastUpdateTime() { + if (lastUpdateTimeBuilder_ == null) { + lastUpdateTime_ = null; + onChanged(); + } else { + lastUpdateTime_ = null; + lastUpdateTimeBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Time when the task was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + */ + public com.google.protobuf.Timestamp.Builder getLastUpdateTimeBuilder() { + + onChanged(); + return getLastUpdateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Time when the task was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + */ + public com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder() { + if (lastUpdateTimeBuilder_ != null) { + return lastUpdateTimeBuilder_.getMessageOrBuilder(); + } else { + return lastUpdateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : lastUpdateTime_; + } + } + /** + * + * + *
+     * Time when the task was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getLastUpdateTimeFieldBuilder() { + if (lastUpdateTimeBuilder_ == null) { + lastUpdateTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getLastUpdateTime(), getParentForChildren(), isClean()); + lastUpdateTime_ = null; + } + return lastUpdateTimeBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.MigrationTask) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.MigrationTask) + private static final com.google.cloud.bigquery.migration.v2alpha.MigrationTask DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.MigrationTask(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationTask getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public MigrationTask parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MigrationTask(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + 
+ @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationTask getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationTaskOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationTaskOrBuilder.java new file mode 100644 index 0000000..b27d4be --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationTaskOrBuilder.java @@ -0,0 +1,254 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_entities.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface MigrationTaskOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.MigrationTask) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Output only. Immutable. The unique identifier for the migration task. The ID is server-generated.
+   * 
+ * + * + * string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The id. + */ + java.lang.String getId(); + /** + * + * + *
+   * Output only. Immutable. The unique identifier for the migration task. The ID is server-generated.
+   * 
+ * + * + * string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The bytes for id. + */ + com.google.protobuf.ByteString getIdBytes(); + + /** + * + * + *
+   * The type of the task. This must be a supported task type.
+   * 
+ * + * string type = 2; + * + * @return The type. + */ + java.lang.String getType(); + /** + * + * + *
+   * The type of the task. This must be a supported task type.
+   * 
+ * + * string type = 2; + * + * @return The bytes for type. + */ + com.google.protobuf.ByteString getTypeBytes(); + + /** + * + * + *
+   * The details of the task. The type URL must be one of the supported task
+   * details messages and correspond to the Task's type.
+   * 
+ * + * .google.protobuf.Any details = 3; + * + * @return Whether the details field is set. + */ + boolean hasDetails(); + /** + * + * + *
+   * The details of the task. The type URL must be one of the supported task
+   * details messages and correspond to the Task's type.
+   * 
+ * + * .google.protobuf.Any details = 3; + * + * @return The details. + */ + com.google.protobuf.Any getDetails(); + /** + * + * + *
+   * The details of the task. The type URL must be one of the supported task
+   * details messages and correspond to the Task's type.
+   * 
+ * + * .google.protobuf.Any details = 3; + */ + com.google.protobuf.AnyOrBuilder getDetailsOrBuilder(); + + /** + * + * + *
+   * Output only. The current state of the task.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationTask.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The enum numeric value on the wire for state. + */ + int getStateValue(); + /** + * + * + *
+   * Output only. The current state of the task.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationTask.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The state. + */ + com.google.cloud.bigquery.migration.v2alpha.MigrationTask.State getState(); + + /** + * + * + *
+   * Output only. An explanation that may be populated when the task is in FAILED state.
+   * 
+ * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the processingError field is set. + */ + boolean hasProcessingError(); + /** + * + * + *
+   * Output only. An explanation that may be populated when the task is in FAILED state.
+   * 
+ * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The processingError. + */ + com.google.rpc.ErrorInfo getProcessingError(); + /** + * + * + *
+   * Output only. An explanation that may be populated when the task is in FAILED state.
+   * 
+ * + * .google.rpc.ErrorInfo processing_error = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.rpc.ErrorInfoOrBuilder getProcessingErrorOrBuilder(); + + /** + * + * + *
+   * Time when the task was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 6; + * + * @return Whether the createTime field is set. + */ + boolean hasCreateTime(); + /** + * + * + *
+   * Time when the task was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 6; + * + * @return The createTime. + */ + com.google.protobuf.Timestamp getCreateTime(); + /** + * + * + *
+   * Time when the task was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 6; + */ + com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder(); + + /** + * + * + *
+   * Time when the task was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + * + * @return Whether the lastUpdateTime field is set. + */ + boolean hasLastUpdateTime(); + /** + * + * + *
+   * Time when the task was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + * + * @return The lastUpdateTime. + */ + com.google.protobuf.Timestamp getLastUpdateTime(); + /** + * + * + *
+   * Time when the task was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 7; + */ + com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationWorkflow.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationWorkflow.java new file mode 100644 index 0000000..0260d30 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationWorkflow.java @@ -0,0 +1,2186 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_entities.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * A migration workflow which specifies what needs to be done for an EDW
+ * migration.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} + */ +public final class MigrationWorkflow extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) + MigrationWorkflowOrBuilder { + private static final long serialVersionUID = 0L; + // Use MigrationWorkflow.newBuilder() to construct. + private MigrationWorkflow(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private MigrationWorkflow() { + name_ = ""; + displayName_ = ""; + state_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new MigrationWorkflow(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private MigrationWorkflow( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 18: + { + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + tasks_ = + com.google.protobuf.MapField.newMapField(TasksDefaultEntryHolder.defaultEntry); + mutable_bitField0_ |= 0x00000001; + } + com.google.protobuf.MapEntry< + java.lang.String, com.google.cloud.bigquery.migration.v2alpha.MigrationTask> + tasks__ = + input.readMessage( + TasksDefaultEntryHolder.defaultEntry.getParserForType(), + 
extensionRegistry); + tasks_.getMutableMap().put(tasks__.getKey(), tasks__.getValue()); + break; + } + case 24: + { + int rawValue = input.readEnum(); + + state_ = rawValue; + break; + } + case 34: + { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (createTime_ != null) { + subBuilder = createTime_.toBuilder(); + } + createTime_ = + input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(createTime_); + createTime_ = subBuilder.buildPartial(); + } + + break; + } + case 42: + { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (lastUpdateTime_ != null) { + subBuilder = lastUpdateTime_.toBuilder(); + } + lastUpdateTime_ = + input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(lastUpdateTime_); + lastUpdateTime_ = subBuilder.buildPartial(); + } + + break; + } + case 50: + { + java.lang.String s = input.readStringRequireUtf8(); + + displayName_ = s; + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationWorkflow_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField(int number) { + switch (number) { + case 2: + return internalGetTasks(); + default: + throw new 
RuntimeException("Invalid map field number: " + number); + } + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationWorkflow_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.class, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder.class); + } + + /** + * + * + *
+   * Possible migration workflow states.
+   * 
+ * + * Protobuf enum {@code google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State} + */ + public enum State implements com.google.protobuf.ProtocolMessageEnum { + /** + * + * + *
+     * Workflow state is unspecified.
+     * 
+ * + * STATE_UNSPECIFIED = 0; + */ + STATE_UNSPECIFIED(0), + /** + * + * + *
+     * Workflow is in draft status, i.e. tasks are not yet eligible for
+     * execution.
+     * 
+ * + * DRAFT = 1; + */ + DRAFT(1), + /** + * + * + *
+     * Workflow is running (i.e. tasks are eligible for execution).
+     * 
+ * + * RUNNING = 2; + */ + RUNNING(2), + /** + * + * + *
+     * Workflow is paused. Tasks currently in progress may continue, but no
+     * further tasks will be scheduled.
+     * 
+ * + * PAUSED = 3; + */ + PAUSED(3), + /** + * + * + *
+     * Workflow is complete. There should not be any task in a non-terminal
+     * state, but if they are (e.g. forced termination), they will not be
+     * scheduled.
+     * 
+ * + * COMPLETED = 4; + */ + COMPLETED(4), + UNRECOGNIZED(-1), + ; + + /** + * + * + *
+     * Workflow state is unspecified.
+     * 
+ * + * STATE_UNSPECIFIED = 0; + */ + public static final int STATE_UNSPECIFIED_VALUE = 0; + /** + * + * + *
+     * Workflow is in draft status, i.e. tasks are not yet eligible for
+     * execution.
+     * 
+ * + * DRAFT = 1; + */ + public static final int DRAFT_VALUE = 1; + /** + * + * + *
+     * Workflow is running (i.e. tasks are eligible for execution).
+     * 
+ * + * RUNNING = 2; + */ + public static final int RUNNING_VALUE = 2; + /** + * + * + *
+     * Workflow is paused. Tasks currently in progress may continue, but no
+     * further tasks will be scheduled.
+     * 
+ * + * PAUSED = 3; + */ + public static final int PAUSED_VALUE = 3; + /** + * + * + *
+     * Workflow is complete. There should not be any task in a non-terminal
+     * state, but if they are (e.g. forced termination), they will not be
+     * scheduled.
+     * 
+ * + * COMPLETED = 4; + */ + public static final int COMPLETED_VALUE = 4; + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static State valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static State forNumber(int value) { + switch (value) { + case 0: + return STATE_UNSPECIFIED; + case 1: + return DRAFT; + case 2: + return RUNNING; + case 3: + return PAUSED; + case 4: + return COMPLETED; + default: + return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { + return internalValueMap; + } + + private static final com.google.protobuf.Internal.EnumLiteMap internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public State findValueByNumber(int number) { + return State.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + + public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { + return getDescriptor(); + } + + public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.getDescriptor() + .getEnumTypes() + .get(0); + } + + private static final State[] VALUES = values(); + + public static 
State valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private State(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State) + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + * + * + *
+   * Output only. Immutable. The unique identifier for the migration workflow. The ID is
+   * server-generated.
+   * Example: `projects/123/locations/us/workflows/345`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Output only. Immutable. The unique identifier for the migration workflow. The ID is
+   * server-generated.
+   * Example: `projects/123/locations/us/workflows/345`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DISPLAY_NAME_FIELD_NUMBER = 6; + private volatile java.lang.Object displayName_; + /** + * + * + *
+   * The display name of the workflow. This can be set to give a workflow
+   * a descriptive name. There is no guarantee or enforcement of uniqueness.
+   * 
+ * + * string display_name = 6; + * + * @return The displayName. + */ + @java.lang.Override + public java.lang.String getDisplayName() { + java.lang.Object ref = displayName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + displayName_ = s; + return s; + } + } + /** + * + * + *
+   * The display name of the workflow. This can be set to give a workflow
+   * a descriptive name. There is no guarantee or enforcement of uniqueness.
+   * 
+ * + * string display_name = 6; + * + * @return The bytes for displayName. + */ + @java.lang.Override + public com.google.protobuf.ByteString getDisplayNameBytes() { + java.lang.Object ref = displayName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + displayName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TASKS_FIELD_NUMBER = 2; + + private static final class TasksDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, com.google.cloud.bigquery.migration.v2alpha.MigrationTask> + defaultEntry = + com.google.protobuf.MapEntry + . + newDefaultInstance( + com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationWorkflow_TasksEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.MESSAGE, + com.google.cloud.bigquery.migration.v2alpha.MigrationTask + .getDefaultInstance()); + } + + private com.google.protobuf.MapField< + java.lang.String, com.google.cloud.bigquery.migration.v2alpha.MigrationTask> + tasks_; + + private com.google.protobuf.MapField< + java.lang.String, com.google.cloud.bigquery.migration.v2alpha.MigrationTask> + internalGetTasks() { + if (tasks_ == null) { + return com.google.protobuf.MapField.emptyMapField(TasksDefaultEntryHolder.defaultEntry); + } + return tasks_; + } + + public int getTasksCount() { + return internalGetTasks().getMap().size(); + } + /** + * + * + *
+   * The tasks in a workflow in a named map. The name (i.e. key) has no
+   * meaning and is merely a convenient way to address a specific task
+   * in a workflow.
+   * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + @java.lang.Override + public boolean containsTasks(java.lang.String key) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + return internalGetTasks().getMap().containsKey(key); + } + /** Use {@link #getTasksMap()} instead. */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map + getTasks() { + return getTasksMap(); + } + /** + * + * + *
+   * The tasks in a workflow in a named map. The name (i.e. key) has no
+   * meaning and is merely a convenient way to address a specific task
+   * in a workflow.
+   * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + @java.lang.Override + public java.util.Map + getTasksMap() { + return internalGetTasks().getMap(); + } + /** + * + * + *
+   * The tasks in a workflow in a named map. The name (i.e. key) has no
+   * meaning and is merely a convenient way to address a specific task
+   * in a workflow.
+   * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationTask getTasksOrDefault( + java.lang.String key, + com.google.cloud.bigquery.migration.v2alpha.MigrationTask defaultValue) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + java.util.Map map = + internalGetTasks().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * + * + *
+   * The tasks in a workflow in a named map. The name (i.e. key) has no
+   * meaning and is merely a convenient way to address a specific task
+   * in a workflow.
+   * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationTask getTasksOrThrow( + java.lang.String key) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + java.util.Map map = + internalGetTasks().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public static final int STATE_FIELD_NUMBER = 3; + private int state_; + /** + * + * + *
+   * Output only. That status of the workflow.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State state = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The enum numeric value on the wire for state. + */ + @java.lang.Override + public int getStateValue() { + return state_; + } + /** + * + * + *
+   * Output only. That status of the workflow.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State state = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The state. + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State getState() { + @SuppressWarnings("deprecation") + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State result = + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State.valueOf(state_); + return result == null + ? com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State.UNRECOGNIZED + : result; + } + + public static final int CREATE_TIME_FIELD_NUMBER = 4; + private com.google.protobuf.Timestamp createTime_; + /** + * + * + *
+   * Time when the workflow was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 4; + * + * @return Whether the createTime field is set. + */ + @java.lang.Override + public boolean hasCreateTime() { + return createTime_ != null; + } + /** + * + * + *
+   * Time when the workflow was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 4; + * + * @return The createTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getCreateTime() { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + /** + * + * + *
+   * Time when the workflow was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + return getCreateTime(); + } + + public static final int LAST_UPDATE_TIME_FIELD_NUMBER = 5; + private com.google.protobuf.Timestamp lastUpdateTime_; + /** + * + * + *
+   * Time when the workflow was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + * + * @return Whether the lastUpdateTime field is set. + */ + @java.lang.Override + public boolean hasLastUpdateTime() { + return lastUpdateTime_ != null; + } + /** + * + * + *
+   * Time when the workflow was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + * + * @return The lastUpdateTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getLastUpdateTime() { + return lastUpdateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : lastUpdateTime_; + } + /** + * + * + *
+   * Time when the workflow was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder() { + return getLastUpdateTime(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( + output, internalGetTasks(), TasksDefaultEntryHolder.defaultEntry, 2); + if (state_ + != com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State.STATE_UNSPECIFIED + .getNumber()) { + output.writeEnum(3, state_); + } + if (createTime_ != null) { + output.writeMessage(4, getCreateTime()); + } + if (lastUpdateTime_ != null) { + output.writeMessage(5, getLastUpdateTime()); + } + if (!getDisplayNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, displayName_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + for (java.util.Map.Entry< + java.lang.String, com.google.cloud.bigquery.migration.v2alpha.MigrationTask> + entry : internalGetTasks().getMap().entrySet()) { + com.google.protobuf.MapEntry< + java.lang.String, com.google.cloud.bigquery.migration.v2alpha.MigrationTask> + tasks__ = + TasksDefaultEntryHolder.defaultEntry + .newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += 
com.google.protobuf.CodedOutputStream.computeMessageSize(2, tasks__); + } + if (state_ + != com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State.STATE_UNSPECIFIED + .getNumber()) { + size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, state_); + } + if (createTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getCreateTime()); + } + if (lastUpdateTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getLastUpdateTime()); + } + if (!getDisplayNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, displayName_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow other = + (com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) obj; + + if (!getName().equals(other.getName())) return false; + if (!getDisplayName().equals(other.getDisplayName())) return false; + if (!internalGetTasks().equals(other.internalGetTasks())) return false; + if (state_ != other.state_) return false; + if (hasCreateTime() != other.hasCreateTime()) return false; + if (hasCreateTime()) { + if (!getCreateTime().equals(other.getCreateTime())) return false; + } + if (hasLastUpdateTime() != other.hasLastUpdateTime()) return false; + if (hasLastUpdateTime()) { + if (!getLastUpdateTime().equals(other.getLastUpdateTime())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + 
NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER; + hash = (53 * hash) + getDisplayName().hashCode(); + if (!internalGetTasks().getMap().isEmpty()) { + hash = (37 * hash) + TASKS_FIELD_NUMBER; + hash = (53 * hash) + internalGetTasks().hashCode(); + } + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + state_; + if (hasCreateTime()) { + hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getCreateTime().hashCode(); + } + if (hasLastUpdateTime()) { + hash = (37 * hash) + LAST_UPDATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getLastUpdateTime().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * A migration workflow which specifies what needs to be done for an EDW
+   * migration.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflowOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationWorkflow_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField(int number) { + switch (number) { + case 2: + return internalGetTasks(); + default: + throw new RuntimeException("Invalid map field number: " + number); + } + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField(int number) { + switch (number) { + case 2: + return internalGetMutableTasks(); + default: + throw new RuntimeException("Invalid map field number: " + number); + } + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationWorkflow_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.class, + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.Builder.class); + } + + // Construct using com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void 
maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + displayName_ = ""; + + internalGetMutableTasks().clear(); + state_ = 0; + + if (createTimeBuilder_ == null) { + createTime_ = null; + } else { + createTime_ = null; + createTimeBuilder_ = null; + } + if (lastUpdateTimeBuilder_ == null) { + lastUpdateTime_ = null; + } else { + lastUpdateTime_ = null; + lastUpdateTimeBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationEntitiesProto + .internal_static_google_cloud_bigquery_migration_v2alpha_MigrationWorkflow_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow + getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow build() { + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow result = + new com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow(this); + int from_bitField0_ = bitField0_; + result.name_ = name_; + result.displayName_ = displayName_; + result.tasks_ = internalGetTasks(); + result.tasks_.makeImmutable(); + result.state_ = state_; + if (createTimeBuilder_ == null) { + result.createTime_ = createTime_; + } else { + result.createTime_ = createTimeBuilder_.build(); + } + if (lastUpdateTimeBuilder_ == null) { + 
result.lastUpdateTime_ = lastUpdateTime_; + } else { + result.lastUpdateTime_ = lastUpdateTimeBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) { + return mergeFrom((com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow other) { + if (other + == com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (!other.getDisplayName().isEmpty()) { + displayName_ = other.displayName_; + onChanged(); + } + internalGetMutableTasks().mergeFrom(other.internalGetTasks()); + if (other.state_ != 0) { + setStateValue(other.getStateValue()); + } + if (other.hasCreateTime()) { + 
mergeCreateTime(other.getCreateTime()); + } + if (other.hasLastUpdateTime()) { + mergeLastUpdateTime(other.getLastUpdateTime()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Output only. Immutable. The unique identifier for the migration workflow. The ID is
+     * server-generated.
+     * Example: `projects/123/locations/us/workflows/345`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Output only. Immutable. The unique identifier for the migration workflow. The ID is
+     * server-generated.
+     * Example: `projects/123/locations/us/workflows/345`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Output only. Immutable. The unique identifier for the migration workflow. The ID is
+     * server-generated.
+     * Example: `projects/123/locations/us/workflows/345`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. Immutable. The unique identifier for the migration workflow. The ID is
+     * server-generated.
+     * Example: `projects/123/locations/us/workflows/345`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. Immutable. The unique identifier for the migration workflow. The ID is
+     * server-generated.
+     * Example: `projects/123/locations/us/workflows/345`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private java.lang.Object displayName_ = ""; + /** + * + * + *
+     * The display name of the workflow. This can be set to give a workflow
+     * a descriptive name. There is no guarantee or enforcement of uniqueness.
+     * 
+ * + * string display_name = 6; + * + * @return The displayName. + */ + public java.lang.String getDisplayName() { + java.lang.Object ref = displayName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + displayName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * The display name of the workflow. This can be set to give a workflow
+     * a descriptive name. There is no guarantee or enforcement of uniqueness.
+     * 
+ * + * string display_name = 6; + * + * @return The bytes for displayName. + */ + public com.google.protobuf.ByteString getDisplayNameBytes() { + java.lang.Object ref = displayName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + displayName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * The display name of the workflow. This can be set to give a workflow
+     * a descriptive name. There is no guarantee or enforcement of uniqueness.
+     * 
+ * + * string display_name = 6; + * + * @param value The displayName to set. + * @return This builder for chaining. + */ + public Builder setDisplayName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + displayName_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * The display name of the workflow. This can be set to give a workflow
+     * a descriptive name. There is no guarantee or enforcement of uniqueness.
+     * 
+ * + * string display_name = 6; + * + * @return This builder for chaining. + */ + public Builder clearDisplayName() { + + displayName_ = getDefaultInstance().getDisplayName(); + onChanged(); + return this; + } + /** + * + * + *
+     * The display name of the workflow. This can be set to give a workflow
+     * a descriptive name. There is no guarantee or enforcement of uniqueness.
+     * 
+ * + * string display_name = 6; + * + * @param value The bytes for displayName to set. + * @return This builder for chaining. + */ + public Builder setDisplayNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + displayName_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.MapField< + java.lang.String, com.google.cloud.bigquery.migration.v2alpha.MigrationTask> + tasks_; + + private com.google.protobuf.MapField< + java.lang.String, com.google.cloud.bigquery.migration.v2alpha.MigrationTask> + internalGetTasks() { + if (tasks_ == null) { + return com.google.protobuf.MapField.emptyMapField(TasksDefaultEntryHolder.defaultEntry); + } + return tasks_; + } + + private com.google.protobuf.MapField< + java.lang.String, com.google.cloud.bigquery.migration.v2alpha.MigrationTask> + internalGetMutableTasks() { + onChanged(); + ; + if (tasks_ == null) { + tasks_ = com.google.protobuf.MapField.newMapField(TasksDefaultEntryHolder.defaultEntry); + } + if (!tasks_.isMutable()) { + tasks_ = tasks_.copy(); + } + return tasks_; + } + + public int getTasksCount() { + return internalGetTasks().getMap().size(); + } + /** + * + * + *
+     * The tasks in a workflow in a named map. The name (i.e. key) has no
+     * meaning and is merely a convenient way to address a specific task
+     * in a workflow.
+     * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + @java.lang.Override + public boolean containsTasks(java.lang.String key) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + return internalGetTasks().getMap().containsKey(key); + } + /** Use {@link #getTasksMap()} instead. */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map< + java.lang.String, com.google.cloud.bigquery.migration.v2alpha.MigrationTask> + getTasks() { + return getTasksMap(); + } + /** + * + * + *
+     * The tasks in a workflow in a named map. The name (i.e. key) has no
+     * meaning and is merely a convenient way to address a specific task
+     * in a workflow.
+     * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + @java.lang.Override + public java.util.Map< + java.lang.String, com.google.cloud.bigquery.migration.v2alpha.MigrationTask> + getTasksMap() { + return internalGetTasks().getMap(); + } + /** + * + * + *
+     * The tasks in a workflow in a named map. The name (i.e. key) has no
+     * meaning and is merely a convenient way to address a specific task
+     * in a workflow.
+     * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationTask getTasksOrDefault( + java.lang.String key, + com.google.cloud.bigquery.migration.v2alpha.MigrationTask defaultValue) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + java.util.Map + map = internalGetTasks().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * + * + *
+     * The tasks in a workflow in a named map. The name (i.e. key) has no
+     * meaning and is merely a convenient way to address a specific task
+     * in a workflow.
+     * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationTask getTasksOrThrow( + java.lang.String key) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + java.util.Map + map = internalGetTasks().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearTasks() { + internalGetMutableTasks().getMutableMap().clear(); + return this; + } + /** + * + * + *
+     * The tasks in a workflow in a named map. The name (i.e. key) has no
+     * meaning and is merely a convenient way to address a specific task
+     * in a workflow.
+     * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + public Builder removeTasks(java.lang.String key) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + internalGetMutableTasks().getMutableMap().remove(key); + return this; + } + /** Use alternate mutation accessors instead. */ + @java.lang.Deprecated + public java.util.Map< + java.lang.String, com.google.cloud.bigquery.migration.v2alpha.MigrationTask> + getMutableTasks() { + return internalGetMutableTasks().getMutableMap(); + } + /** + * + * + *
+     * The tasks in a workflow in a named map. The name (i.e. key) has no
+     * meaning and is merely a convenient way to address a specific task
+     * in a workflow.
+     * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + public Builder putTasks( + java.lang.String key, com.google.cloud.bigquery.migration.v2alpha.MigrationTask value) { + if (key == null) { + throw new java.lang.NullPointerException(); + } + if (value == null) { + throw new java.lang.NullPointerException(); + } + internalGetMutableTasks().getMutableMap().put(key, value); + return this; + } + /** + * + * + *
+     * The tasks in a workflow in a named map. The name (i.e. key) has no
+     * meaning and is merely a convenient way to address a specific task
+     * in a workflow.
+     * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + public Builder putAllTasks( + java.util.Map + values) { + internalGetMutableTasks().getMutableMap().putAll(values); + return this; + } + + private int state_ = 0; + /** + * + * + *
+     * Output only. The status of the workflow.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State state = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The enum numeric value on the wire for state. + */ + @java.lang.Override + public int getStateValue() { + return state_; + } + /** + * + * + *
+     * Output only. The status of the workflow.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State state = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @param value The enum numeric value on the wire for state to set. + * @return This builder for chaining. + */ + public Builder setStateValue(int value) { + + state_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The status of the workflow.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State state = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The state. + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State getState() { + @SuppressWarnings("deprecation") + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State result = + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State.valueOf(state_); + return result == null + ? com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State.UNRECOGNIZED + : result; + } + /** + * + * + *
+     * Output only. The status of the workflow.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State state = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @param value The state to set. + * @return This builder for chaining. + */ + public Builder setState( + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State value) { + if (value == null) { + throw new NullPointerException(); + } + + state_ = value.getNumber(); + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The status of the workflow.
+     * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State state = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return This builder for chaining. + */ + public Builder clearState() { + + state_ = 0; + onChanged(); + return this; + } + + private com.google.protobuf.Timestamp createTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + createTimeBuilder_; + /** + * + * + *
+     * Time when the workflow was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + * + * @return Whether the createTime field is set. + */ + public boolean hasCreateTime() { + return createTimeBuilder_ != null || createTime_ != null; + } + /** + * + * + *
+     * Time when the workflow was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + * + * @return The createTime. + */ + public com.google.protobuf.Timestamp getCreateTime() { + if (createTimeBuilder_ == null) { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } else { + return createTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Time when the workflow was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public Builder setCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + createTime_ = value; + onChanged(); + } else { + createTimeBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Time when the workflow was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (createTimeBuilder_ == null) { + createTime_ = builderForValue.build(); + onChanged(); + } else { + createTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Time when the workflow was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public Builder mergeCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (createTime_ != null) { + createTime_ = + com.google.protobuf.Timestamp.newBuilder(createTime_).mergeFrom(value).buildPartial(); + } else { + createTime_ = value; + } + onChanged(); + } else { + createTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Time when the workflow was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public Builder clearCreateTime() { + if (createTimeBuilder_ == null) { + createTime_ = null; + onChanged(); + } else { + createTime_ = null; + createTimeBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Time when the workflow was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() { + + onChanged(); + return getCreateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Time when the workflow was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + if (createTimeBuilder_ != null) { + return createTimeBuilder_.getMessageOrBuilder(); + } else { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } + } + /** + * + * + *
+     * Time when the workflow was created.
+     * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getCreateTimeFieldBuilder() { + if (createTimeBuilder_ == null) { + createTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getCreateTime(), getParentForChildren(), isClean()); + createTime_ = null; + } + return createTimeBuilder_; + } + + private com.google.protobuf.Timestamp lastUpdateTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + lastUpdateTimeBuilder_; + /** + * + * + *
+     * Time when the workflow was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + * + * @return Whether the lastUpdateTime field is set. + */ + public boolean hasLastUpdateTime() { + return lastUpdateTimeBuilder_ != null || lastUpdateTime_ != null; + } + /** + * + * + *
+     * Time when the workflow was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + * + * @return The lastUpdateTime. + */ + public com.google.protobuf.Timestamp getLastUpdateTime() { + if (lastUpdateTimeBuilder_ == null) { + return lastUpdateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : lastUpdateTime_; + } else { + return lastUpdateTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Time when the workflow was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + */ + public Builder setLastUpdateTime(com.google.protobuf.Timestamp value) { + if (lastUpdateTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + lastUpdateTime_ = value; + onChanged(); + } else { + lastUpdateTimeBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Time when the workflow was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + */ + public Builder setLastUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (lastUpdateTimeBuilder_ == null) { + lastUpdateTime_ = builderForValue.build(); + onChanged(); + } else { + lastUpdateTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Time when the workflow was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + */ + public Builder mergeLastUpdateTime(com.google.protobuf.Timestamp value) { + if (lastUpdateTimeBuilder_ == null) { + if (lastUpdateTime_ != null) { + lastUpdateTime_ = + com.google.protobuf.Timestamp.newBuilder(lastUpdateTime_) + .mergeFrom(value) + .buildPartial(); + } else { + lastUpdateTime_ = value; + } + onChanged(); + } else { + lastUpdateTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Time when the workflow was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + */ + public Builder clearLastUpdateTime() { + if (lastUpdateTimeBuilder_ == null) { + lastUpdateTime_ = null; + onChanged(); + } else { + lastUpdateTime_ = null; + lastUpdateTimeBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Time when the workflow was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + */ + public com.google.protobuf.Timestamp.Builder getLastUpdateTimeBuilder() { + + onChanged(); + return getLastUpdateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Time when the workflow was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + */ + public com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder() { + if (lastUpdateTimeBuilder_ != null) { + return lastUpdateTimeBuilder_.getMessageOrBuilder(); + } else { + return lastUpdateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : lastUpdateTime_; + } + } + /** + * + * + *
+     * Time when the workflow was last updated.
+     * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getLastUpdateTimeFieldBuilder() { + if (lastUpdateTimeBuilder_ == null) { + lastUpdateTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getLastUpdateTime(), getParentForChildren(), isClean()); + lastUpdateTime_ = null; + } + return lastUpdateTimeBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) + private static final com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public MigrationWorkflow parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MigrationWorkflow(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser 
parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationWorkflowName.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationWorkflowName.java new file mode 100644 index 0000000..18050bb --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationWorkflowName.java @@ -0,0 +1,223 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.migration.v2alpha; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. 
+@Generated("by gapic-generator-java") +public class MigrationWorkflowName implements ResourceName { + private static final PathTemplate PROJECT_LOCATION_WORKFLOW = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/workflows/{workflow}"); + private volatile Map fieldValuesMap; + private final String project; + private final String location; + private final String workflow; + + @Deprecated + protected MigrationWorkflowName() { + project = null; + location = null; + workflow = null; + } + + private MigrationWorkflowName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + workflow = Preconditions.checkNotNull(builder.getWorkflow()); + } + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getWorkflow() { + return workflow; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + public static MigrationWorkflowName of(String project, String location, String workflow) { + return newBuilder().setProject(project).setLocation(location).setWorkflow(workflow).build(); + } + + public static String format(String project, String location, String workflow) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setWorkflow(workflow) + .build() + .toString(); + } + + public static MigrationWorkflowName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PROJECT_LOCATION_WORKFLOW.validatedMatch( + formattedString, "MigrationWorkflowName.parse: formattedString not in valid format"); + return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("workflow")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : 
formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList<>(values.size()); + for (MigrationWorkflowName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PROJECT_LOCATION_WORKFLOW.matches(formattedString); + } + + @Override + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + fieldMapBuilder.put("location", location); + } + if (workflow != null) { + fieldMapBuilder.put("workflow", workflow); + } + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PROJECT_LOCATION_WORKFLOW.instantiate( + "project", project, "location", location, "workflow", workflow); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + MigrationWorkflowName that = ((MigrationWorkflowName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.workflow, that.workflow); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(workflow); + return h; + } + + /** Builder for projects/{project}/locations/{location}/workflows/{workflow}. 
*/ + public static class Builder { + private String project; + private String location; + private String workflow; + + protected Builder() {} + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getWorkflow() { + return workflow; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setWorkflow(String workflow) { + this.workflow = workflow; + return this; + } + + private Builder(MigrationWorkflowName migrationWorkflowName) { + this.project = migrationWorkflowName.project; + this.location = migrationWorkflowName.location; + this.workflow = migrationWorkflowName.workflow; + } + + public MigrationWorkflowName build() { + return new MigrationWorkflowName(this); + } + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationWorkflowOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationWorkflowOrBuilder.java new file mode 100644 index 0000000..581cd5a --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/MigrationWorkflowOrBuilder.java @@ -0,0 +1,256 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_entities.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface MigrationWorkflowOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Output only. Immutable. The unique identifier for the migration workflow. The ID is
+   * server-generated.
+   * Example: `projects/123/locations/us/workflows/345`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Output only. Immutable. The unique identifier for the migration workflow. The ID is
+   * server-generated.
+   * Example: `projects/123/locations/us/workflows/345`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE]; + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); + + /** + * + * + *
+   * The display name of the workflow. This can be set to give a workflow
+   * a descriptive name. There is no guarantee or enforcement of uniqueness.
+   * 
+ * + * string display_name = 6; + * + * @return The displayName. + */ + java.lang.String getDisplayName(); + /** + * + * + *
+   * The display name of the workflow. This can be set to give a workflow
+   * a descriptive name. There is no guarantee or enforcement of uniqueness.
+   * 
+ * + * string display_name = 6; + * + * @return The bytes for displayName. + */ + com.google.protobuf.ByteString getDisplayNameBytes(); + + /** + * + * + *
+   * The tasks in a workflow in a named map. The name (i.e. key) has no
+   * meaning and is merely a convenient way to address a specific task
+   * in a workflow.
+   * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + int getTasksCount(); + /** + * + * + *
+   * The tasks in a workflow in a named map. The name (i.e. key) has no
+   * meaning and is merely a convenient way to address a specific task
+   * in a workflow.
+   * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + boolean containsTasks(java.lang.String key); + /** Use {@link #getTasksMap()} instead. */ + @java.lang.Deprecated + java.util.Map + getTasks(); + /** + * + * + *
+   * The tasks in a workflow in a named map. The name (i.e. key) has no
+   * meaning and is merely a convenient way to address a specific task
+   * in a workflow.
+   * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + java.util.Map + getTasksMap(); + /** + * + * + *
+   * The tasks in a workflow in a named map. The name (i.e. key) has no
+   * meaning and is merely a convenient way to address a specific task
+   * in a workflow.
+   * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + com.google.cloud.bigquery.migration.v2alpha.MigrationTask getTasksOrDefault( + java.lang.String key, com.google.cloud.bigquery.migration.v2alpha.MigrationTask defaultValue); + /** + * + * + *
+   * The tasks in a workflow in a named map. The name (i.e. key) has no
+   * meaning and is merely a convenient way to address a specific task
+   * in a workflow.
+   * 
+ * + * map<string, .google.cloud.bigquery.migration.v2alpha.MigrationTask> tasks = 2; + * + */ + com.google.cloud.bigquery.migration.v2alpha.MigrationTask getTasksOrThrow(java.lang.String key); + + /** + * + * + *
+   * Output only. That status of the workflow.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State state = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The enum numeric value on the wire for state. + */ + int getStateValue(); + /** + * + * + *
+   * Output only. That status of the workflow.
+   * 
+ * + * + * .google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State state = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The state. + */ + com.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow.State getState(); + + /** + * + * + *
+   * Time when the workflow was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 4; + * + * @return Whether the createTime field is set. + */ + boolean hasCreateTime(); + /** + * + * + *
+   * Time when the workflow was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 4; + * + * @return The createTime. + */ + com.google.protobuf.Timestamp getCreateTime(); + /** + * + * + *
+   * Time when the workflow was created.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 4; + */ + com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder(); + + /** + * + * + *
+   * Time when the workflow was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + * + * @return Whether the lastUpdateTime field is set. + */ + boolean hasLastUpdateTime(); + /** + * + * + *
+   * Time when the workflow was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + * + * @return The lastUpdateTime. + */ + com.google.protobuf.Timestamp getLastUpdateTime(); + /** + * + * + *
+   * Time when the workflow was last updated.
+   * 
+ * + * .google.protobuf.Timestamp last_update_time = 5; + */ + com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/Point.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/Point.java new file mode 100644 index 0000000..a1058de --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/Point.java @@ -0,0 +1,1111 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_metrics.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * A single data point in a time series.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.Point} + */ +public final class Point extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.Point) + PointOrBuilder { + private static final long serialVersionUID = 0L; + // Use Point.newBuilder() to construct. + private Point(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private Point() {} + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new Point(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private Point( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + com.google.cloud.bigquery.migration.v2alpha.TimeInterval.Builder subBuilder = null; + if (interval_ != null) { + subBuilder = interval_.toBuilder(); + } + interval_ = + input.readMessage( + com.google.cloud.bigquery.migration.v2alpha.TimeInterval.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(interval_); + interval_ = subBuilder.buildPartial(); + } + + break; + } + case 18: + { + com.google.cloud.bigquery.migration.v2alpha.TypedValue.Builder subBuilder = null; + if (value_ != null) { + subBuilder = value_.toBuilder(); + } + value_ = + input.readMessage( + com.google.cloud.bigquery.migration.v2alpha.TypedValue.parser(), + 
extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(value_); + value_ = subBuilder.buildPartial(); + } + + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_Point_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_Point_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.Point.class, + com.google.cloud.bigquery.migration.v2alpha.Point.Builder.class); + } + + public static final int INTERVAL_FIELD_NUMBER = 1; + private com.google.cloud.bigquery.migration.v2alpha.TimeInterval interval_; + /** + * + * + *
+   * The time interval to which the data point applies.  For `GAUGE` metrics,
+   * the start time does not need to be supplied, but if it is supplied, it must
+   * equal the end time.  For `DELTA` metrics, the start and end time should
+   * specify a non-zero interval, with subsequent points specifying contiguous
+   * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+   * time should specify a non-zero interval, with subsequent points specifying
+   * the same start time and increasing end times, until an event resets the
+   * cumulative value to zero and sets a new start time for the following
+   * points.
+   * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + * + * @return Whether the interval field is set. + */ + @java.lang.Override + public boolean hasInterval() { + return interval_ != null; + } + /** + * + * + *
+   * The time interval to which the data point applies.  For `GAUGE` metrics,
+   * the start time does not need to be supplied, but if it is supplied, it must
+   * equal the end time.  For `DELTA` metrics, the start and end time should
+   * specify a non-zero interval, with subsequent points specifying contiguous
+   * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+   * time should specify a non-zero interval, with subsequent points specifying
+   * the same start time and increasing end times, until an event resets the
+   * cumulative value to zero and sets a new start time for the following
+   * points.
+   * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + * + * @return The interval. + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TimeInterval getInterval() { + return interval_ == null + ? com.google.cloud.bigquery.migration.v2alpha.TimeInterval.getDefaultInstance() + : interval_; + } + /** + * + * + *
+   * The time interval to which the data point applies.  For `GAUGE` metrics,
+   * the start time does not need to be supplied, but if it is supplied, it must
+   * equal the end time.  For `DELTA` metrics, the start and end time should
+   * specify a non-zero interval, with subsequent points specifying contiguous
+   * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+   * time should specify a non-zero interval, with subsequent points specifying
+   * the same start time and increasing end times, until an event resets the
+   * cumulative value to zero and sets a new start time for the following
+   * points.
+   * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TimeIntervalOrBuilder getIntervalOrBuilder() { + return getInterval(); + } + + public static final int VALUE_FIELD_NUMBER = 2; + private com.google.cloud.bigquery.migration.v2alpha.TypedValue value_; + /** + * + * + *
+   * The value of the data point.
+   * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + * + * @return Whether the value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return value_ != null; + } + /** + * + * + *
+   * The value of the data point.
+   * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + * + * @return The value. + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TypedValue getValue() { + return value_ == null + ? com.google.cloud.bigquery.migration.v2alpha.TypedValue.getDefaultInstance() + : value_; + } + /** + * + * + *
+   * The value of the data point.
+   * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TypedValueOrBuilder getValueOrBuilder() { + return getValue(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (interval_ != null) { + output.writeMessage(1, getInterval()); + } + if (value_ != null) { + output.writeMessage(2, getValue()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (interval_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getInterval()); + } + if (value_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getValue()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.Point)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.Point other = + (com.google.cloud.bigquery.migration.v2alpha.Point) obj; + + if (hasInterval() != other.hasInterval()) return false; + if (hasInterval()) { + if (!getInterval().equals(other.getInterval())) return false; + } + if (hasValue() != other.hasValue()) return false; + if (hasValue()) { + if (!getValue().equals(other.getValue())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int 
hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasInterval()) { + hash = (37 * hash) + INTERVAL_FIELD_NUMBER; + hash = (53 * hash) + getInterval().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.Point parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.Point parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.Point parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.Point parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.Point parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.Point parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static 
com.google.cloud.bigquery.migration.v2alpha.Point parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.Point parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.Point parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.Point parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.Point parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.Point parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder(com.google.cloud.bigquery.migration.v2alpha.Point prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); 
+ } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * A single data point in a time series.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.Point} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.Point) + com.google.cloud.bigquery.migration.v2alpha.PointOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_Point_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_Point_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.Point.class, + com.google.cloud.bigquery.migration.v2alpha.Point.Builder.class); + } + + // Construct using com.google.cloud.bigquery.migration.v2alpha.Point.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + if (intervalBuilder_ == null) { + interval_ = null; + } else { + interval_ = null; + intervalBuilder_ = null; + } + if (valueBuilder_ == null) { + value_ = null; + } else { + value_ = null; + valueBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + 
.internal_static_google_cloud_bigquery_migration_v2alpha_Point_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.Point getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.Point.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.Point build() { + com.google.cloud.bigquery.migration.v2alpha.Point result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.Point buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.Point result = + new com.google.cloud.bigquery.migration.v2alpha.Point(this); + if (intervalBuilder_ == null) { + result.interval_ = interval_; + } else { + result.interval_ = intervalBuilder_.build(); + } + if (valueBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = valueBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, 
value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.migration.v2alpha.Point) { + return mergeFrom((com.google.cloud.bigquery.migration.v2alpha.Point) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.migration.v2alpha.Point other) { + if (other == com.google.cloud.bigquery.migration.v2alpha.Point.getDefaultInstance()) + return this; + if (other.hasInterval()) { + mergeInterval(other.getInterval()); + } + if (other.hasValue()) { + mergeValue(other.getValue()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.Point parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.Point) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private com.google.cloud.bigquery.migration.v2alpha.TimeInterval interval_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.TimeInterval, + com.google.cloud.bigquery.migration.v2alpha.TimeInterval.Builder, + com.google.cloud.bigquery.migration.v2alpha.TimeIntervalOrBuilder> + intervalBuilder_; + /** + * + * + *
+     * The time interval to which the data point applies.  For `GAUGE` metrics,
+     * the start time does not need to be supplied, but if it is supplied, it must
+     * equal the end time.  For `DELTA` metrics, the start and end time should
+     * specify a non-zero interval, with subsequent points specifying contiguous
+     * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+     * time should specify a non-zero interval, with subsequent points specifying
+     * the same start time and increasing end times, until an event resets the
+     * cumulative value to zero and sets a new start time for the following
+     * points.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + * + * @return Whether the interval field is set. + */ + public boolean hasInterval() { + return intervalBuilder_ != null || interval_ != null; + } + /** + * + * + *
+     * The time interval to which the data point applies.  For `GAUGE` metrics,
+     * the start time does not need to be supplied, but if it is supplied, it must
+     * equal the end time.  For `DELTA` metrics, the start and end time should
+     * specify a non-zero interval, with subsequent points specifying contiguous
+     * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+     * time should specify a non-zero interval, with subsequent points specifying
+     * the same start time and increasing end times, until an event resets the
+     * cumulative value to zero and sets a new start time for the following
+     * points.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + * + * @return The interval. + */ + public com.google.cloud.bigquery.migration.v2alpha.TimeInterval getInterval() { + if (intervalBuilder_ == null) { + return interval_ == null + ? com.google.cloud.bigquery.migration.v2alpha.TimeInterval.getDefaultInstance() + : interval_; + } else { + return intervalBuilder_.getMessage(); + } + } + /** + * + * + *
+     * The time interval to which the data point applies.  For `GAUGE` metrics,
+     * the start time does not need to be supplied, but if it is supplied, it must
+     * equal the end time.  For `DELTA` metrics, the start and end time should
+     * specify a non-zero interval, with subsequent points specifying contiguous
+     * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+     * time should specify a non-zero interval, with subsequent points specifying
+     * the same start time and increasing end times, until an event resets the
+     * cumulative value to zero and sets a new start time for the following
+     * points.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + */ + public Builder setInterval(com.google.cloud.bigquery.migration.v2alpha.TimeInterval value) { + if (intervalBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + interval_ = value; + onChanged(); + } else { + intervalBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * The time interval to which the data point applies.  For `GAUGE` metrics,
+     * the start time does not need to be supplied, but if it is supplied, it must
+     * equal the end time.  For `DELTA` metrics, the start and end time should
+     * specify a non-zero interval, with subsequent points specifying contiguous
+     * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+     * time should specify a non-zero interval, with subsequent points specifying
+     * the same start time and increasing end times, until an event resets the
+     * cumulative value to zero and sets a new start time for the following
+     * points.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + */ + public Builder setInterval( + com.google.cloud.bigquery.migration.v2alpha.TimeInterval.Builder builderForValue) { + if (intervalBuilder_ == null) { + interval_ = builderForValue.build(); + onChanged(); + } else { + intervalBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * The time interval to which the data point applies.  For `GAUGE` metrics,
+     * the start time does not need to be supplied, but if it is supplied, it must
+     * equal the end time.  For `DELTA` metrics, the start and end time should
+     * specify a non-zero interval, with subsequent points specifying contiguous
+     * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+     * time should specify a non-zero interval, with subsequent points specifying
+     * the same start time and increasing end times, until an event resets the
+     * cumulative value to zero and sets a new start time for the following
+     * points.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + */ + public Builder mergeInterval(com.google.cloud.bigquery.migration.v2alpha.TimeInterval value) { + if (intervalBuilder_ == null) { + if (interval_ != null) { + interval_ = + com.google.cloud.bigquery.migration.v2alpha.TimeInterval.newBuilder(interval_) + .mergeFrom(value) + .buildPartial(); + } else { + interval_ = value; + } + onChanged(); + } else { + intervalBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * The time interval to which the data point applies.  For `GAUGE` metrics,
+     * the start time does not need to be supplied, but if it is supplied, it must
+     * equal the end time.  For `DELTA` metrics, the start and end time should
+     * specify a non-zero interval, with subsequent points specifying contiguous
+     * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+     * time should specify a non-zero interval, with subsequent points specifying
+     * the same start time and increasing end times, until an event resets the
+     * cumulative value to zero and sets a new start time for the following
+     * points.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + */ + public Builder clearInterval() { + if (intervalBuilder_ == null) { + interval_ = null; + onChanged(); + } else { + interval_ = null; + intervalBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * The time interval to which the data point applies.  For `GAUGE` metrics,
+     * the start time does not need to be supplied, but if it is supplied, it must
+     * equal the end time.  For `DELTA` metrics, the start and end time should
+     * specify a non-zero interval, with subsequent points specifying contiguous
+     * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+     * time should specify a non-zero interval, with subsequent points specifying
+     * the same start time and increasing end times, until an event resets the
+     * cumulative value to zero and sets a new start time for the following
+     * points.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + */ + public com.google.cloud.bigquery.migration.v2alpha.TimeInterval.Builder getIntervalBuilder() { + + onChanged(); + return getIntervalFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * The time interval to which the data point applies.  For `GAUGE` metrics,
+     * the start time does not need to be supplied, but if it is supplied, it must
+     * equal the end time.  For `DELTA` metrics, the start and end time should
+     * specify a non-zero interval, with subsequent points specifying contiguous
+     * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+     * time should specify a non-zero interval, with subsequent points specifying
+     * the same start time and increasing end times, until an event resets the
+     * cumulative value to zero and sets a new start time for the following
+     * points.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + */ + public com.google.cloud.bigquery.migration.v2alpha.TimeIntervalOrBuilder + getIntervalOrBuilder() { + if (intervalBuilder_ != null) { + return intervalBuilder_.getMessageOrBuilder(); + } else { + return interval_ == null + ? com.google.cloud.bigquery.migration.v2alpha.TimeInterval.getDefaultInstance() + : interval_; + } + } + /** + * + * + *
+     * The time interval to which the data point applies.  For `GAUGE` metrics,
+     * the start time does not need to be supplied, but if it is supplied, it must
+     * equal the end time.  For `DELTA` metrics, the start and end time should
+     * specify a non-zero interval, with subsequent points specifying contiguous
+     * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+     * time should specify a non-zero interval, with subsequent points specifying
+     * the same start time and increasing end times, until an event resets the
+     * cumulative value to zero and sets a new start time for the following
+     * points.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.TimeInterval, + com.google.cloud.bigquery.migration.v2alpha.TimeInterval.Builder, + com.google.cloud.bigquery.migration.v2alpha.TimeIntervalOrBuilder> + getIntervalFieldBuilder() { + if (intervalBuilder_ == null) { + intervalBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.TimeInterval, + com.google.cloud.bigquery.migration.v2alpha.TimeInterval.Builder, + com.google.cloud.bigquery.migration.v2alpha.TimeIntervalOrBuilder>( + getInterval(), getParentForChildren(), isClean()); + interval_ = null; + } + return intervalBuilder_; + } + + private com.google.cloud.bigquery.migration.v2alpha.TypedValue value_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.TypedValue, + com.google.cloud.bigquery.migration.v2alpha.TypedValue.Builder, + com.google.cloud.bigquery.migration.v2alpha.TypedValueOrBuilder> + valueBuilder_; + /** + * + * + *
+     * The value of the data point.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + * + * @return Whether the value field is set. + */ + public boolean hasValue() { + return valueBuilder_ != null || value_ != null; + } + /** + * + * + *
+     * The value of the data point.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + * + * @return The value. + */ + public com.google.cloud.bigquery.migration.v2alpha.TypedValue getValue() { + if (valueBuilder_ == null) { + return value_ == null + ? com.google.cloud.bigquery.migration.v2alpha.TypedValue.getDefaultInstance() + : value_; + } else { + return valueBuilder_.getMessage(); + } + } + /** + * + * + *
+     * The value of the data point.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + */ + public Builder setValue(com.google.cloud.bigquery.migration.v2alpha.TypedValue value) { + if (valueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + valueBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * The value of the data point.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + */ + public Builder setValue( + com.google.cloud.bigquery.migration.v2alpha.TypedValue.Builder builderForValue) { + if (valueBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + valueBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * The value of the data point.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + */ + public Builder mergeValue(com.google.cloud.bigquery.migration.v2alpha.TypedValue value) { + if (valueBuilder_ == null) { + if (value_ != null) { + value_ = + com.google.cloud.bigquery.migration.v2alpha.TypedValue.newBuilder(value_) + .mergeFrom(value) + .buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + valueBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * The value of the data point.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + */ + public Builder clearValue() { + if (valueBuilder_ == null) { + value_ = null; + onChanged(); + } else { + value_ = null; + valueBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * The value of the data point.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + */ + public com.google.cloud.bigquery.migration.v2alpha.TypedValue.Builder getValueBuilder() { + + onChanged(); + return getValueFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * The value of the data point.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + */ + public com.google.cloud.bigquery.migration.v2alpha.TypedValueOrBuilder getValueOrBuilder() { + if (valueBuilder_ != null) { + return valueBuilder_.getMessageOrBuilder(); + } else { + return value_ == null + ? com.google.cloud.bigquery.migration.v2alpha.TypedValue.getDefaultInstance() + : value_; + } + } + /** + * + * + *
+     * The value of the data point.
+     * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.TypedValue, + com.google.cloud.bigquery.migration.v2alpha.TypedValue.Builder, + com.google.cloud.bigquery.migration.v2alpha.TypedValueOrBuilder> + getValueFieldBuilder() { + if (valueBuilder_ == null) { + valueBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.TypedValue, + com.google.cloud.bigquery.migration.v2alpha.TypedValue.Builder, + com.google.cloud.bigquery.migration.v2alpha.TypedValueOrBuilder>( + getValue(), getParentForChildren(), isClean()); + value_ = null; + } + return valueBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.Point) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.Point) + private static final com.google.cloud.bigquery.migration.v2alpha.Point DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.Point(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.Point getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Point parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Point(input, extensionRegistry); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.Point getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/PointOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/PointOrBuilder.java new file mode 100644 index 0000000..a7c5095 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/PointOrBuilder.java @@ -0,0 +1,119 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_metrics.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface PointOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.Point) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * The time interval to which the data point applies.  For `GAUGE` metrics,
+   * the start time does not need to be supplied, but if it is supplied, it must
+   * equal the end time.  For `DELTA` metrics, the start and end time should
+   * specify a non-zero interval, with subsequent points specifying contiguous
+   * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+   * time should specify a non-zero interval, with subsequent points specifying
+   * the same start time and increasing end times, until an event resets the
+   * cumulative value to zero and sets a new start time for the following
+   * points.
+   * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + * + * @return Whether the interval field is set. + */ + boolean hasInterval(); + /** + * + * + *
+   * The time interval to which the data point applies.  For `GAUGE` metrics,
+   * the start time does not need to be supplied, but if it is supplied, it must
+   * equal the end time.  For `DELTA` metrics, the start and end time should
+   * specify a non-zero interval, with subsequent points specifying contiguous
+   * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+   * time should specify a non-zero interval, with subsequent points specifying
+   * the same start time and increasing end times, until an event resets the
+   * cumulative value to zero and sets a new start time for the following
+   * points.
+   * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + * + * @return The interval. + */ + com.google.cloud.bigquery.migration.v2alpha.TimeInterval getInterval(); + /** + * + * + *
+   * The time interval to which the data point applies.  For `GAUGE` metrics,
+   * the start time does not need to be supplied, but if it is supplied, it must
+   * equal the end time.  For `DELTA` metrics, the start and end time should
+   * specify a non-zero interval, with subsequent points specifying contiguous
+   * and non-overlapping intervals.  For `CUMULATIVE` metrics, the start and end
+   * time should specify a non-zero interval, with subsequent points specifying
+   * the same start time and increasing end times, until an event resets the
+   * cumulative value to zero and sets a new start time for the following
+   * points.
+   * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TimeInterval interval = 1; + */ + com.google.cloud.bigquery.migration.v2alpha.TimeIntervalOrBuilder getIntervalOrBuilder(); + + /** + * + * + *
+   * The value of the data point.
+   * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + * + * @return Whether the value field is set. + */ + boolean hasValue(); + /** + * + * + *
+   * The value of the data point.
+   * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + * + * @return The value. + */ + com.google.cloud.bigquery.migration.v2alpha.TypedValue getValue(); + /** + * + * + *
+   * The value of the data point.
+   * 
+ * + * .google.cloud.bigquery.migration.v2alpha.TypedValue value = 2; + */ + com.google.cloud.bigquery.migration.v2alpha.TypedValueOrBuilder getValueOrBuilder(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ResourceErrorDetail.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ResourceErrorDetail.java new file mode 100644 index 0000000..a61e069 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ResourceErrorDetail.java @@ -0,0 +1,1405 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_error_details.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * Provides details for errors and the corresponding resources.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail} + */ +public final class ResourceErrorDetail extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail) + ResourceErrorDetailOrBuilder { + private static final long serialVersionUID = 0L; + // Use ResourceErrorDetail.newBuilder() to construct. + private ResourceErrorDetail(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ResourceErrorDetail() { + errorDetails_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ResourceErrorDetail(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private ResourceErrorDetail( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + com.google.rpc.ResourceInfo.Builder subBuilder = null; + if (resourceInfo_ != null) { + subBuilder = resourceInfo_.toBuilder(); + } + resourceInfo_ = + input.readMessage(com.google.rpc.ResourceInfo.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(resourceInfo_); + resourceInfo_ = subBuilder.buildPartial(); + } + + break; + } + case 18: + { + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + errorDetails_ = + new 
java.util.ArrayList< + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail>(); + mutable_bitField0_ |= 0x00000001; + } + errorDetails_.add( + input.readMessage( + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.parser(), + extensionRegistry)); + break; + } + case 24: + { + errorCount_ = input.readInt32(); + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) != 0)) { + errorDetails_ = java.util.Collections.unmodifiableList(errorDetails_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ResourceErrorDetail_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ResourceErrorDetail_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.class, + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.Builder.class); + } + + public static final int RESOURCE_INFO_FIELD_NUMBER = 1; + private com.google.rpc.ResourceInfo resourceInfo_; + /** + * + * + *
+   * Required. Information about the resource where the error is located.
+   * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the resourceInfo field is set. + */ + @java.lang.Override + public boolean hasResourceInfo() { + return resourceInfo_ != null; + } + /** + * + * + *
+   * Required. Information about the resource where the error is located.
+   * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The resourceInfo. + */ + @java.lang.Override + public com.google.rpc.ResourceInfo getResourceInfo() { + return resourceInfo_ == null ? com.google.rpc.ResourceInfo.getDefaultInstance() : resourceInfo_; + } + /** + * + * + *
+   * Required. Information about the resource where the error is located.
+   * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.rpc.ResourceInfoOrBuilder getResourceInfoOrBuilder() { + return getResourceInfo(); + } + + public static final int ERROR_DETAILS_FIELD_NUMBER = 2; + private java.util.List errorDetails_; + /** + * + * + *
+   * Required. The error details for the resource.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public java.util.List + getErrorDetailsList() { + return errorDetails_; + } + /** + * + * + *
+   * Required. The error details for the resource.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public java.util.List + getErrorDetailsOrBuilderList() { + return errorDetails_; + } + /** + * + * + *
+   * Required. The error details for the resource.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public int getErrorDetailsCount() { + return errorDetails_.size(); + } + /** + * + * + *
+   * Required. The error details for the resource.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ErrorDetail getErrorDetails(int index) { + return errorDetails_.get(index); + } + /** + * + * + *
+   * Required. The error details for the resource.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ErrorDetailOrBuilder getErrorDetailsOrBuilder( + int index) { + return errorDetails_.get(index); + } + + public static final int ERROR_COUNT_FIELD_NUMBER = 3; + private int errorCount_; + /** + * + * + *
+   * Required. How many errors there are in total for the resource. Truncation can be
+   * indicated by having an `error_count` that is higher than the size of
+   * `error_details`.
+   * 
+ * + * int32 error_count = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The errorCount. + */ + @java.lang.Override + public int getErrorCount() { + return errorCount_; + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (resourceInfo_ != null) { + output.writeMessage(1, getResourceInfo()); + } + for (int i = 0; i < errorDetails_.size(); i++) { + output.writeMessage(2, errorDetails_.get(i)); + } + if (errorCount_ != 0) { + output.writeInt32(3, errorCount_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (resourceInfo_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getResourceInfo()); + } + for (int i = 0; i < errorDetails_.size(); i++) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, errorDetails_.get(i)); + } + if (errorCount_ != 0) { + size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, errorCount_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail other = + (com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail) obj; + + if (hasResourceInfo() != other.hasResourceInfo()) return false; + if (hasResourceInfo()) { + if 
(!getResourceInfo().equals(other.getResourceInfo())) return false; + } + if (!getErrorDetailsList().equals(other.getErrorDetailsList())) return false; + if (getErrorCount() != other.getErrorCount()) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasResourceInfo()) { + hash = (37 * hash) + RESOURCE_INFO_FIELD_NUMBER; + hash = (53 * hash) + getResourceInfo().hashCode(); + } + if (getErrorDetailsCount() > 0) { + hash = (37 * hash) + ERROR_DETAILS_FIELD_NUMBER; + hash = (53 * hash) + getErrorDetailsList().hashCode(); + } + hash = (37 * hash) + ERROR_COUNT_FIELD_NUMBER; + hash = (53 * hash) + getErrorCount(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, 
extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static 
com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Provides details for errors and the corresponding resources.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail) + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetailOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ResourceErrorDetail_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ResourceErrorDetail_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.class, + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.Builder.class); + } + + // Construct using com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + getErrorDetailsFieldBuilder(); + } + } + + @java.lang.Override + public Builder clear() { + super.clear(); + if (resourceInfoBuilder_ == null) { + resourceInfo_ = null; + } else { + resourceInfo_ = null; + resourceInfoBuilder_ = null; + } + if (errorDetailsBuilder_ == null) { + errorDetails_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + errorDetailsBuilder_.clear(); + } + 
errorCount_ = 0; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationErrorDetailsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_ResourceErrorDetail_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail + getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail build() { + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail result = + new com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail(this); + int from_bitField0_ = bitField0_; + if (resourceInfoBuilder_ == null) { + result.resourceInfo_ = resourceInfo_; + } else { + result.resourceInfo_ = resourceInfoBuilder_.build(); + } + if (errorDetailsBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + errorDetails_ = java.util.Collections.unmodifiableList(errorDetails_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.errorDetails_ = errorDetails_; + } else { + result.errorDetails_ = errorDetailsBuilder_.build(); + } + result.errorCount_ = errorCount_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail) { + return mergeFrom((com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail other) { + if (other + == com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail.getDefaultInstance()) + return this; + if (other.hasResourceInfo()) { + mergeResourceInfo(other.getResourceInfo()); + } + if (errorDetailsBuilder_ == null) { + if (!other.errorDetails_.isEmpty()) { + if (errorDetails_.isEmpty()) { + errorDetails_ = other.errorDetails_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureErrorDetailsIsMutable(); + errorDetails_.addAll(other.errorDetails_); + } + onChanged(); + } + } else { + if (!other.errorDetails_.isEmpty()) { + if (errorDetailsBuilder_.isEmpty()) { + errorDetailsBuilder_.dispose(); + errorDetailsBuilder_ = null; + errorDetails_ = other.errorDetails_; + bitField0_ = (bitField0_ & ~0x00000001); + errorDetailsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? 
getErrorDetailsFieldBuilder() + : null; + } else { + errorDetailsBuilder_.addAllMessages(other.errorDetails_); + } + } + } + if (other.getErrorCount() != 0) { + setErrorCount(other.getErrorCount()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int bitField0_; + + private com.google.rpc.ResourceInfo resourceInfo_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.ResourceInfo, + com.google.rpc.ResourceInfo.Builder, + com.google.rpc.ResourceInfoOrBuilder> + resourceInfoBuilder_; + /** + * + * + *
+     * Required. Information about the resource where the error is located.
+     * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the resourceInfo field is set. + */ + public boolean hasResourceInfo() { + return resourceInfoBuilder_ != null || resourceInfo_ != null; + } + /** + * + * + *
+     * Required. Information about the resource where the error is located.
+     * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The resourceInfo. + */ + public com.google.rpc.ResourceInfo getResourceInfo() { + if (resourceInfoBuilder_ == null) { + return resourceInfo_ == null + ? com.google.rpc.ResourceInfo.getDefaultInstance() + : resourceInfo_; + } else { + return resourceInfoBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Required. Information about the resource where the error is located.
+     * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setResourceInfo(com.google.rpc.ResourceInfo value) { + if (resourceInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + resourceInfo_ = value; + onChanged(); + } else { + resourceInfoBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Required. Information about the resource where the error is located.
+     * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setResourceInfo(com.google.rpc.ResourceInfo.Builder builderForValue) { + if (resourceInfoBuilder_ == null) { + resourceInfo_ = builderForValue.build(); + onChanged(); + } else { + resourceInfoBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Required. Information about the resource where the error is located.
+     * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder mergeResourceInfo(com.google.rpc.ResourceInfo value) { + if (resourceInfoBuilder_ == null) { + if (resourceInfo_ != null) { + resourceInfo_ = + com.google.rpc.ResourceInfo.newBuilder(resourceInfo_).mergeFrom(value).buildPartial(); + } else { + resourceInfo_ = value; + } + onChanged(); + } else { + resourceInfoBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Required. Information about the resource where the error is located.
+     * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder clearResourceInfo() { + if (resourceInfoBuilder_ == null) { + resourceInfo_ = null; + onChanged(); + } else { + resourceInfo_ = null; + resourceInfoBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Required. Information about the resource where the error is located.
+     * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.rpc.ResourceInfo.Builder getResourceInfoBuilder() { + + onChanged(); + return getResourceInfoFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Required. Information about the resource where the error is located.
+     * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.rpc.ResourceInfoOrBuilder getResourceInfoOrBuilder() { + if (resourceInfoBuilder_ != null) { + return resourceInfoBuilder_.getMessageOrBuilder(); + } else { + return resourceInfo_ == null + ? com.google.rpc.ResourceInfo.getDefaultInstance() + : resourceInfo_; + } + } + /** + * + * + *
+     * Required. Information about the resource where the error is located.
+     * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.ResourceInfo, + com.google.rpc.ResourceInfo.Builder, + com.google.rpc.ResourceInfoOrBuilder> + getResourceInfoFieldBuilder() { + if (resourceInfoBuilder_ == null) { + resourceInfoBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.rpc.ResourceInfo, + com.google.rpc.ResourceInfo.Builder, + com.google.rpc.ResourceInfoOrBuilder>( + getResourceInfo(), getParentForChildren(), isClean()); + resourceInfo_ = null; + } + return resourceInfoBuilder_; + } + + private java.util.List errorDetails_ = + java.util.Collections.emptyList(); + + private void ensureErrorDetailsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + errorDetails_ = + new java.util.ArrayList( + errorDetails_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail, + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.Builder, + com.google.cloud.bigquery.migration.v2alpha.ErrorDetailOrBuilder> + errorDetailsBuilder_; + + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public java.util.List + getErrorDetailsList() { + if (errorDetailsBuilder_ == null) { + return java.util.Collections.unmodifiableList(errorDetails_); + } else { + return errorDetailsBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public int getErrorDetailsCount() { + if (errorDetailsBuilder_ == null) { + return errorDetails_.size(); + } else { + return errorDetailsBuilder_.getCount(); + } + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.ErrorDetail getErrorDetails(int index) { + if (errorDetailsBuilder_ == null) { + return errorDetails_.get(index); + } else { + return errorDetailsBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setErrorDetails( + int index, com.google.cloud.bigquery.migration.v2alpha.ErrorDetail value) { + if (errorDetailsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureErrorDetailsIsMutable(); + errorDetails_.set(index, value); + onChanged(); + } else { + errorDetailsBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setErrorDetails( + int index, + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.Builder builderForValue) { + if (errorDetailsBuilder_ == null) { + ensureErrorDetailsIsMutable(); + errorDetails_.set(index, builderForValue.build()); + onChanged(); + } else { + errorDetailsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder addErrorDetails(com.google.cloud.bigquery.migration.v2alpha.ErrorDetail value) { + if (errorDetailsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureErrorDetailsIsMutable(); + errorDetails_.add(value); + onChanged(); + } else { + errorDetailsBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder addErrorDetails( + int index, com.google.cloud.bigquery.migration.v2alpha.ErrorDetail value) { + if (errorDetailsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureErrorDetailsIsMutable(); + errorDetails_.add(index, value); + onChanged(); + } else { + errorDetailsBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder addErrorDetails( + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.Builder builderForValue) { + if (errorDetailsBuilder_ == null) { + ensureErrorDetailsIsMutable(); + errorDetails_.add(builderForValue.build()); + onChanged(); + } else { + errorDetailsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder addErrorDetails( + int index, + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.Builder builderForValue) { + if (errorDetailsBuilder_ == null) { + ensureErrorDetailsIsMutable(); + errorDetails_.add(index, builderForValue.build()); + onChanged(); + } else { + errorDetailsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder addAllErrorDetails( + java.lang.Iterable + values) { + if (errorDetailsBuilder_ == null) { + ensureErrorDetailsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, errorDetails_); + onChanged(); + } else { + errorDetailsBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder clearErrorDetails() { + if (errorDetailsBuilder_ == null) { + errorDetails_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + errorDetailsBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder removeErrorDetails(int index) { + if (errorDetailsBuilder_ == null) { + ensureErrorDetailsIsMutable(); + errorDetails_.remove(index); + onChanged(); + } else { + errorDetailsBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.Builder getErrorDetailsBuilder( + int index) { + return getErrorDetailsFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.ErrorDetailOrBuilder + getErrorDetailsOrBuilder(int index) { + if (errorDetailsBuilder_ == null) { + return errorDetails_.get(index); + } else { + return errorDetailsBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public java.util.List< + ? extends com.google.cloud.bigquery.migration.v2alpha.ErrorDetailOrBuilder> + getErrorDetailsOrBuilderList() { + if (errorDetailsBuilder_ != null) { + return errorDetailsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(errorDetails_); + } + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.Builder + addErrorDetailsBuilder() { + return getErrorDetailsFieldBuilder() + .addBuilder(com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.getDefaultInstance()); + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.Builder addErrorDetailsBuilder( + int index) { + return getErrorDetailsFieldBuilder() + .addBuilder( + index, com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.getDefaultInstance()); + } + /** + * + * + *
+     * Required. The error details for the resource.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public java.util.List + getErrorDetailsBuilderList() { + return getErrorDetailsFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail, + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.Builder, + com.google.cloud.bigquery.migration.v2alpha.ErrorDetailOrBuilder> + getErrorDetailsFieldBuilder() { + if (errorDetailsBuilder_ == null) { + errorDetailsBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail, + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail.Builder, + com.google.cloud.bigquery.migration.v2alpha.ErrorDetailOrBuilder>( + errorDetails_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); + errorDetails_ = null; + } + return errorDetailsBuilder_; + } + + private int errorCount_; + /** + * + * + *
+     * Required. How many errors there are in total for the resource. Truncation can be
+     * indicated by having an `error_count` that is higher than the size of
+     * `error_details`.
+     * 
+ * + * int32 error_count = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The errorCount. + */ + @java.lang.Override + public int getErrorCount() { + return errorCount_; + } + /** + * + * + *
+     * Required. How many errors there are in total for the resource. Truncation can be
+     * indicated by having an `error_count` that is higher than the size of
+     * `error_details`.
+     * 
+ * + * int32 error_count = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @param value The errorCount to set. + * @return This builder for chaining. + */ + public Builder setErrorCount(int value) { + + errorCount_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. How many errors there are in total for the resource. Truncation can be
+     * indicated by having an `error_count` that is higher than the size of
+     * `error_details`.
+     * 
+ * + * int32 error_count = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return This builder for chaining. + */ + public Builder clearErrorCount() { + + errorCount_ = 0; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail) + private static final com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ResourceErrorDetail parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ResourceErrorDetail(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git 
a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ResourceErrorDetailOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ResourceErrorDetailOrBuilder.java new file mode 100644 index 0000000..e78e55a --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/ResourceErrorDetailOrBuilder.java @@ -0,0 +1,141 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_error_details.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface ResourceErrorDetailOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.ResourceErrorDetail) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. Information about the resource where the error is located.
+   * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the resourceInfo field is set. + */ + boolean hasResourceInfo(); + /** + * + * + *
+   * Required. Information about the resource where the error is located.
+   * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The resourceInfo. + */ + com.google.rpc.ResourceInfo getResourceInfo(); + /** + * + * + *
+   * Required. Information about the resource where the error is located.
+   * 
+ * + * .google.rpc.ResourceInfo resource_info = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.rpc.ResourceInfoOrBuilder getResourceInfoOrBuilder(); + + /** + * + * + *
+   * Required. The error details for the resource.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + java.util.List getErrorDetailsList(); + /** + * + * + *
+   * Required. The error details for the resource.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.bigquery.migration.v2alpha.ErrorDetail getErrorDetails(int index); + /** + * + * + *
+   * Required. The error details for the resource.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + int getErrorDetailsCount(); + /** + * + * + *
+   * Required. The error details for the resource.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + java.util.List + getErrorDetailsOrBuilderList(); + /** + * + * + *
+   * Required. The error details for the resource.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.ErrorDetail error_details = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.bigquery.migration.v2alpha.ErrorDetailOrBuilder getErrorDetailsOrBuilder( + int index); + + /** + * + * + *
+   * Required. How many errors there are in total for the resource. Truncation can be
+   * indicated by having an `error_count` that is higher than the size of
+   * `error_details`.
+   * 
+ * + * int32 error_count = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The errorCount. + */ + int getErrorCount(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/StartMigrationWorkflowRequest.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/StartMigrationWorkflowRequest.java new file mode 100644 index 0000000..a4125b4 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/StartMigrationWorkflowRequest.java @@ -0,0 +1,677 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * A request to start a previously created migration workflow.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest} + */ +public final class StartMigrationWorkflowRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest) + StartMigrationWorkflowRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use StartMigrationWorkflowRequest.newBuilder() to construct. + private StartMigrationWorkflowRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private StartMigrationWorkflowRequest() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new StartMigrationWorkflowRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private StartMigrationWorkflowRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_StartMigrationWorkflowRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_StartMigrationWorkflowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest.class, + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest.Builder + .class); + } + + public static final int NAME_FIELD_NUMBER = 1; + private volatile java.lang.Object name_; + /** + * + * + *
+   * Required. The unique identifier for the migration workflow.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The unique identifier for the migration workflow.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj + instanceof com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest other = + (com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest) obj; + + if (!getName().equals(other.getName())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int 
hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static 
com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public 
static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * A request to start a previously created migration workflow.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest) + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_StartMigrationWorkflowRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + .internal_static_google_cloud_bigquery_migration_v2alpha_StartMigrationWorkflowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest.class, + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest.Builder + .class); + } + + // Construct using + // com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + name_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationServiceProto + 
.internal_static_google_cloud_bigquery_migration_v2alpha_StartMigrationWorkflowRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest build() { + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest + buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest result = + new com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest(this); + result.name_ = name_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + 
@java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest) { + return mergeFrom( + (com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest other) { + if (other + == com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest + .getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest parsedMessage = + null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The unique identifier for the migration workflow.
+     * Example: `projects/123/locations/us/workflows/1234`
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest) + private static final com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public StartMigrationWorkflowRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StartMigrationWorkflowRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { 
+ return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/StartMigrationWorkflowRequestOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/StartMigrationWorkflowRequestOrBuilder.java new file mode 100644 index 0000000..ed49772 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/StartMigrationWorkflowRequestOrBuilder.java @@ -0,0 +1,56 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_service.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface StartMigrationWorkflowRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The unique identifier for the migration workflow.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Required. The unique identifier for the migration workflow.
+   * Example: `projects/123/locations/us/workflows/1234`
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TimeInterval.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TimeInterval.java new file mode 100644 index 0000000..4a4cdbb --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TimeInterval.java @@ -0,0 +1,1042 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_metrics.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * A time interval extending just after a start time through an end time.
+ * If the start time is the same as the end time, then the interval
+ * represents a single point in time.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.TimeInterval} + */ +public final class TimeInterval extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.TimeInterval) + TimeIntervalOrBuilder { + private static final long serialVersionUID = 0L; + // Use TimeInterval.newBuilder() to construct. + private TimeInterval(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private TimeInterval() {} + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new TimeInterval(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private TimeInterval( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (startTime_ != null) { + subBuilder = startTime_.toBuilder(); + } + startTime_ = + input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(startTime_); + startTime_ = subBuilder.buildPartial(); + } + + break; + } + case 18: + { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (endTime_ != null) { + subBuilder = endTime_.toBuilder(); + } + endTime_ = + input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + 
subBuilder.mergeFrom(endTime_); + endTime_ = subBuilder.buildPartial(); + } + + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TimeInterval_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TimeInterval_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.TimeInterval.class, + com.google.cloud.bigquery.migration.v2alpha.TimeInterval.Builder.class); + } + + public static final int START_TIME_FIELD_NUMBER = 1; + private com.google.protobuf.Timestamp startTime_; + /** + * + * + *
+   * Optional. The beginning of the time interval.  The default value
+   * for the start time is the end time. The start time must not be
+   * later than the end time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the startTime field is set. + */ + @java.lang.Override + public boolean hasStartTime() { + return startTime_ != null; + } + /** + * + * + *
+   * Optional. The beginning of the time interval.  The default value
+   * for the start time is the end time. The start time must not be
+   * later than the end time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The startTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getStartTime() { + return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; + } + /** + * + * + *
+   * Optional. The beginning of the time interval.  The default value
+   * for the start time is the end time. The start time must not be
+   * later than the end time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { + return getStartTime(); + } + + public static final int END_TIME_FIELD_NUMBER = 2; + private com.google.protobuf.Timestamp endTime_; + /** + * + * + *
+   * Required. The end of the time interval.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * @return Whether the endTime field is set. + */ + @java.lang.Override + public boolean hasEndTime() { + return endTime_ != null; + } + /** + * + * + *
+   * Required. The end of the time interval.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The endTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getEndTime() { + return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; + } + /** + * + * + *
+   * Required. The end of the time interval.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { + return getEndTime(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (startTime_ != null) { + output.writeMessage(1, getStartTime()); + } + if (endTime_ != null) { + output.writeMessage(2, getEndTime()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (startTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getStartTime()); + } + if (endTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getEndTime()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.TimeInterval)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.TimeInterval other = + (com.google.cloud.bigquery.migration.v2alpha.TimeInterval) obj; + + if (hasStartTime() != other.hasStartTime()) return false; + if (hasStartTime()) { + if (!getStartTime().equals(other.getStartTime())) return false; + } + if (hasEndTime() != other.hasEndTime()) return false; + if (hasEndTime()) { + if (!getEndTime().equals(other.getEndTime())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return 
true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasStartTime()) { + hash = (37 * hash) + START_TIME_FIELD_NUMBER; + hash = (53 * hash) + getStartTime().hashCode(); + } + if (hasEndTime()) { + hash = (37 * hash) + END_TIME_FIELD_NUMBER; + hash = (53 * hash) + getEndTime().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + 
com.google.cloud.bigquery.migration.v2alpha.TimeInterval prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * A time interval extending just after a start time through an end time.
+   * If the start time is the same as the end time, then the interval
+   * represents a single point in time.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.TimeInterval} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.TimeInterval) + com.google.cloud.bigquery.migration.v2alpha.TimeIntervalOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TimeInterval_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TimeInterval_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.TimeInterval.class, + com.google.cloud.bigquery.migration.v2alpha.TimeInterval.Builder.class); + } + + // Construct using com.google.cloud.bigquery.migration.v2alpha.TimeInterval.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + if (startTimeBuilder_ == null) { + startTime_ = null; + } else { + startTime_ = null; + startTimeBuilder_ = null; + } + if (endTimeBuilder_ == null) { + endTime_ = null; + } else { + endTime_ = null; + endTimeBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return 
com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TimeInterval_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TimeInterval getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.TimeInterval.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TimeInterval build() { + com.google.cloud.bigquery.migration.v2alpha.TimeInterval result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TimeInterval buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.TimeInterval result = + new com.google.cloud.bigquery.migration.v2alpha.TimeInterval(this); + if (startTimeBuilder_ == null) { + result.startTime_ = startTime_; + } else { + result.startTime_ = startTimeBuilder_.build(); + } + if (endTimeBuilder_ == null) { + result.endTime_ = endTime_; + } else { + result.endTime_ = endTimeBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder 
addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.migration.v2alpha.TimeInterval) { + return mergeFrom((com.google.cloud.bigquery.migration.v2alpha.TimeInterval) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.migration.v2alpha.TimeInterval other) { + if (other == com.google.cloud.bigquery.migration.v2alpha.TimeInterval.getDefaultInstance()) + return this; + if (other.hasStartTime()) { + mergeStartTime(other.getStartTime()); + } + if (other.hasEndTime()) { + mergeEndTime(other.getEndTime()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.TimeInterval parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.TimeInterval) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private com.google.protobuf.Timestamp startTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + startTimeBuilder_; + /** + * + * + *
+     * Optional. The beginning of the time interval.  The default value
+     * for the start time is the end time. The start time must not be
+     * later than the end time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the startTime field is set. + */ + public boolean hasStartTime() { + return startTimeBuilder_ != null || startTime_ != null; + } + /** + * + * + *
+     * Optional. The beginning of the time interval.  The default value
+     * for the start time is the end time. The start time must not be
+     * later than the end time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The startTime. + */ + public com.google.protobuf.Timestamp getStartTime() { + if (startTimeBuilder_ == null) { + return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; + } else { + return startTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Optional. The beginning of the time interval.  The default value
+     * for the start time is the end time. The start time must not be
+     * later than the end time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder setStartTime(com.google.protobuf.Timestamp value) { + if (startTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + startTime_ = value; + onChanged(); + } else { + startTimeBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Optional. The beginning of the time interval.  The default value
+     * for the start time is the end time. The start time must not be
+     * later than the end time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder setStartTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (startTimeBuilder_ == null) { + startTime_ = builderForValue.build(); + onChanged(); + } else { + startTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Optional. The beginning of the time interval.  The default value
+     * for the start time is the end time. The start time must not be
+     * later than the end time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder mergeStartTime(com.google.protobuf.Timestamp value) { + if (startTimeBuilder_ == null) { + if (startTime_ != null) { + startTime_ = + com.google.protobuf.Timestamp.newBuilder(startTime_).mergeFrom(value).buildPartial(); + } else { + startTime_ = value; + } + onChanged(); + } else { + startTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Optional. The beginning of the time interval.  The default value
+     * for the start time is the end time. The start time must not be
+     * later than the end time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder clearStartTime() { + if (startTimeBuilder_ == null) { + startTime_ = null; + onChanged(); + } else { + startTime_ = null; + startTimeBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Optional. The beginning of the time interval.  The default value
+     * for the start time is the end time. The start time must not be
+     * later than the end time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() { + + onChanged(); + return getStartTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Optional. The beginning of the time interval.  The default value
+     * for the start time is the end time. The start time must not be
+     * later than the end time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { + if (startTimeBuilder_ != null) { + return startTimeBuilder_.getMessageOrBuilder(); + } else { + return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; + } + } + /** + * + * + *
+     * Optional. The beginning of the time interval.  The default value
+     * for the start time is the end time. The start time must not be
+     * later than the end time.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getStartTimeFieldBuilder() { + if (startTimeBuilder_ == null) { + startTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getStartTime(), getParentForChildren(), isClean()); + startTime_ = null; + } + return startTimeBuilder_; + } + + private com.google.protobuf.Timestamp endTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + endTimeBuilder_; + /** + * + * + *
+     * Required. The end of the time interval.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the endTime field is set. + */ + public boolean hasEndTime() { + return endTimeBuilder_ != null || endTime_ != null; + } + /** + * + * + *
+     * Required. The end of the time interval.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The endTime. + */ + public com.google.protobuf.Timestamp getEndTime() { + if (endTimeBuilder_ == null) { + return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; + } else { + return endTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Required. The end of the time interval.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setEndTime(com.google.protobuf.Timestamp value) { + if (endTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + endTime_ = value; + onChanged(); + } else { + endTimeBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Required. The end of the time interval.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setEndTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (endTimeBuilder_ == null) { + endTime_ = builderForValue.build(); + onChanged(); + } else { + endTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Required. The end of the time interval.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder mergeEndTime(com.google.protobuf.Timestamp value) { + if (endTimeBuilder_ == null) { + if (endTime_ != null) { + endTime_ = + com.google.protobuf.Timestamp.newBuilder(endTime_).mergeFrom(value).buildPartial(); + } else { + endTime_ = value; + } + onChanged(); + } else { + endTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Required. The end of the time interval.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder clearEndTime() { + if (endTimeBuilder_ == null) { + endTime_ = null; + onChanged(); + } else { + endTime_ = null; + endTimeBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Required. The end of the time interval.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.protobuf.Timestamp.Builder getEndTimeBuilder() { + + onChanged(); + return getEndTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Required. The end of the time interval.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { + if (endTimeBuilder_ != null) { + return endTimeBuilder_.getMessageOrBuilder(); + } else { + return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; + } + } + /** + * + * + *
+     * Required. The end of the time interval.
+     * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getEndTimeFieldBuilder() { + if (endTimeBuilder_ == null) { + endTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getEndTime(), getParentForChildren(), isClean()); + endTime_ = null; + } + return endTimeBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.TimeInterval) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.TimeInterval) + private static final com.google.cloud.bigquery.migration.v2alpha.TimeInterval DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.TimeInterval(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeInterval getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public TimeInterval parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TimeInterval(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + 
@java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TimeInterval getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TimeIntervalOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TimeIntervalOrBuilder.java new file mode 100644 index 0000000..d65c2dc --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TimeIntervalOrBuilder.java @@ -0,0 +1,104 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_metrics.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface TimeIntervalOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.TimeInterval) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Optional. The beginning of the time interval.  The default value
+   * for the start time is the end time. The start time must not be
+   * later than the end time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the startTime field is set. + */ + boolean hasStartTime(); + /** + * + * + *
+   * Optional. The beginning of the time interval.  The default value
+   * for the start time is the end time. The start time must not be
+   * later than the end time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The startTime. + */ + com.google.protobuf.Timestamp getStartTime(); + /** + * + * + *
+   * Optional. The beginning of the time interval.  The default value
+   * for the start time is the end time. The start time must not be
+   * later than the end time.
+   * 
+ * + * .google.protobuf.Timestamp start_time = 1 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder(); + + /** + * + * + *
+   * Required. The end of the time interval.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * @return Whether the endTime field is set. + */ + boolean hasEndTime(); + /** + * + * + *
+   * Required. The end of the time interval.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The endTime. + */ + com.google.protobuf.Timestamp getEndTime(); + /** + * + * + *
+   * Required. The end of the time interval.
+   * 
+ * + * .google.protobuf.Timestamp end_time = 2 [(.google.api.field_behavior) = REQUIRED]; + */ + com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TimeSeries.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TimeSeries.java new file mode 100644 index 0000000..7609b41 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TimeSeries.java @@ -0,0 +1,1686 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_metrics.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * The metrics object for a SubTask.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.TimeSeries} + */ +public final class TimeSeries extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.TimeSeries) + TimeSeriesOrBuilder { + private static final long serialVersionUID = 0L; + // Use TimeSeries.newBuilder() to construct. + private TimeSeries(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private TimeSeries() { + metric_ = ""; + valueType_ = 0; + metricKind_ = 0; + points_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new TimeSeries(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private TimeSeries( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + java.lang.String s = input.readStringRequireUtf8(); + + metric_ = s; + break; + } + case 16: + { + int rawValue = input.readEnum(); + + valueType_ = rawValue; + break; + } + case 24: + { + int rawValue = input.readEnum(); + + metricKind_ = rawValue; + break; + } + case 34: + { + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + points_ = + new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + points_.add( + input.readMessage( + 
com.google.cloud.bigquery.migration.v2alpha.Point.parser(), + extensionRegistry)); + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) != 0)) { + points_ = java.util.Collections.unmodifiableList(points_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TimeSeries_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TimeSeries_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.TimeSeries.class, + com.google.cloud.bigquery.migration.v2alpha.TimeSeries.Builder.class); + } + + public static final int METRIC_FIELD_NUMBER = 1; + private volatile java.lang.Object metric_; + /** + * + * + *
+   * Required. The name of the metric.
+   * If the metric is not known by the service yet, it will be auto-created.
+   * 
+ * + * string metric = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The metric. + */ + @java.lang.Override + public java.lang.String getMetric() { + java.lang.Object ref = metric_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + metric_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The name of the metric.
+   * If the metric is not known by the service yet, it will be auto-created.
+   * 
+ * + * string metric = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for metric. + */ + @java.lang.Override + public com.google.protobuf.ByteString getMetricBytes() { + java.lang.Object ref = metric_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + metric_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int VALUE_TYPE_FIELD_NUMBER = 2; + private int valueType_; + /** + * + * + *
+   * Required. The value type of the time series.
+   * 
+ * + * + * .google.api.MetricDescriptor.ValueType value_type = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The enum numeric value on the wire for valueType. + */ + @java.lang.Override + public int getValueTypeValue() { + return valueType_; + } + /** + * + * + *
+   * Required. The value type of the time series.
+   * 
+ * + * + * .google.api.MetricDescriptor.ValueType value_type = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The valueType. + */ + @java.lang.Override + public com.google.api.MetricDescriptor.ValueType getValueType() { + @SuppressWarnings("deprecation") + com.google.api.MetricDescriptor.ValueType result = + com.google.api.MetricDescriptor.ValueType.valueOf(valueType_); + return result == null ? com.google.api.MetricDescriptor.ValueType.UNRECOGNIZED : result; + } + + public static final int METRIC_KIND_FIELD_NUMBER = 3; + private int metricKind_; + /** + * + * + *
+   * Optional. The metric kind of the time series.
+   * If present, it must be the same as the metric kind of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * this field specifies the metric kind of the new descriptor and must be
+   * either `GAUGE` (the default) or `CUMULATIVE`.
+   * 
+ * + * + * .google.api.MetricDescriptor.MetricKind metric_kind = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The enum numeric value on the wire for metricKind. + */ + @java.lang.Override + public int getMetricKindValue() { + return metricKind_; + } + /** + * + * + *
+   * Optional. The metric kind of the time series.
+   * If present, it must be the same as the metric kind of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * this field specifies the metric kind of the new descriptor and must be
+   * either `GAUGE` (the default) or `CUMULATIVE`.
+   * 
+ * + * + * .google.api.MetricDescriptor.MetricKind metric_kind = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The metricKind. + */ + @java.lang.Override + public com.google.api.MetricDescriptor.MetricKind getMetricKind() { + @SuppressWarnings("deprecation") + com.google.api.MetricDescriptor.MetricKind result = + com.google.api.MetricDescriptor.MetricKind.valueOf(metricKind_); + return result == null ? com.google.api.MetricDescriptor.MetricKind.UNRECOGNIZED : result; + } + + public static final int POINTS_FIELD_NUMBER = 4; + private java.util.List points_; + /** + * + * + *
+   * Required. The data points of this time series. When listing time series, points are
+   * returned in reverse time order.
+   * When creating a time series, this field must contain exactly one point and
+   * the point's type must be the same as the value type of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * the value type of the descriptor is determined by the point's type, which
+   * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public java.util.List getPointsList() { + return points_; + } + /** + * + * + *
+   * Required. The data points of this time series. When listing time series, points are
+   * returned in reverse time order.
+   * When creating a time series, this field must contain exactly one point and
+   * the point's type must be the same as the value type of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * the value type of the descriptor is determined by the point's type, which
+   * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public java.util.List + getPointsOrBuilderList() { + return points_; + } + /** + * + * + *
+   * Required. The data points of this time series. When listing time series, points are
+   * returned in reverse time order.
+   * When creating a time series, this field must contain exactly one point and
+   * the point's type must be the same as the value type of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * the value type of the descriptor is determined by the point's type, which
+   * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public int getPointsCount() { + return points_.size(); + } + /** + * + * + *
+   * Required. The data points of this time series. When listing time series, points are
+   * returned in reverse time order.
+   * When creating a time series, this field must contain exactly one point and
+   * the point's type must be the same as the value type of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * the value type of the descriptor is determined by the point's type, which
+   * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.Point getPoints(int index) { + return points_.get(index); + } + /** + * + * + *
+   * Required. The data points of this time series. When listing time series, points are
+   * returned in reverse time order.
+   * When creating a time series, this field must contain exactly one point and
+   * the point's type must be the same as the value type of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * the value type of the descriptor is determined by the point's type, which
+   * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.PointOrBuilder getPointsOrBuilder(int index) { + return points_.get(index); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getMetricBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, metric_); + } + if (valueType_ + != com.google.api.MetricDescriptor.ValueType.VALUE_TYPE_UNSPECIFIED.getNumber()) { + output.writeEnum(2, valueType_); + } + if (metricKind_ + != com.google.api.MetricDescriptor.MetricKind.METRIC_KIND_UNSPECIFIED.getNumber()) { + output.writeEnum(3, metricKind_); + } + for (int i = 0; i < points_.size(); i++) { + output.writeMessage(4, points_.get(i)); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getMetricBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, metric_); + } + if (valueType_ + != com.google.api.MetricDescriptor.ValueType.VALUE_TYPE_UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, valueType_); + } + if (metricKind_ + != com.google.api.MetricDescriptor.MetricKind.METRIC_KIND_UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, metricKind_); + } + for (int i = 0; i < points_.size(); i++) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, points_.get(i)); + } + size += 
unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.TimeSeries)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.TimeSeries other = + (com.google.cloud.bigquery.migration.v2alpha.TimeSeries) obj; + + if (!getMetric().equals(other.getMetric())) return false; + if (valueType_ != other.valueType_) return false; + if (metricKind_ != other.metricKind_) return false; + if (!getPointsList().equals(other.getPointsList())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + METRIC_FIELD_NUMBER; + hash = (53 * hash) + getMetric().hashCode(); + hash = (37 * hash) + VALUE_TYPE_FIELD_NUMBER; + hash = (53 * hash) + valueType_; + hash = (37 * hash) + METRIC_KIND_FIELD_NUMBER; + hash = (53 * hash) + metricKind_; + if (getPointsCount() > 0) { + hash = (37 * hash) + POINTS_FIELD_NUMBER; + hash = (53 * hash) + getPointsList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries 
parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.TimeSeries prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * The metrics object for a SubTask.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.TimeSeries} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.TimeSeries) + com.google.cloud.bigquery.migration.v2alpha.TimeSeriesOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TimeSeries_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TimeSeries_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.TimeSeries.class, + com.google.cloud.bigquery.migration.v2alpha.TimeSeries.Builder.class); + } + + // Construct using com.google.cloud.bigquery.migration.v2alpha.TimeSeries.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + getPointsFieldBuilder(); + } + } + + @java.lang.Override + public Builder clear() { + super.clear(); + metric_ = ""; + + valueType_ = 0; + + metricKind_ = 0; + + if (pointsBuilder_ == null) { + points_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + pointsBuilder_.clear(); + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return 
com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TimeSeries_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TimeSeries getDefaultInstanceForType() { + return com.google.cloud.bigquery.migration.v2alpha.TimeSeries.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TimeSeries build() { + com.google.cloud.bigquery.migration.v2alpha.TimeSeries result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TimeSeries buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.TimeSeries result = + new com.google.cloud.bigquery.migration.v2alpha.TimeSeries(this); + int from_bitField0_ = bitField0_; + result.metric_ = metric_; + result.valueType_ = valueType_; + result.metricKind_ = metricKind_; + if (pointsBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + points_ = java.util.Collections.unmodifiableList(points_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.points_ = points_; + } else { + result.points_ = pointsBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, 
java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.migration.v2alpha.TimeSeries) { + return mergeFrom((com.google.cloud.bigquery.migration.v2alpha.TimeSeries) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.migration.v2alpha.TimeSeries other) { + if (other == com.google.cloud.bigquery.migration.v2alpha.TimeSeries.getDefaultInstance()) + return this; + if (!other.getMetric().isEmpty()) { + metric_ = other.metric_; + onChanged(); + } + if (other.valueType_ != 0) { + setValueTypeValue(other.getValueTypeValue()); + } + if (other.metricKind_ != 0) { + setMetricKindValue(other.getMetricKindValue()); + } + if (pointsBuilder_ == null) { + if (!other.points_.isEmpty()) { + if (points_.isEmpty()) { + points_ = other.points_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensurePointsIsMutable(); + points_.addAll(other.points_); + } + onChanged(); + } + } else { + if (!other.points_.isEmpty()) { + if (pointsBuilder_.isEmpty()) { + pointsBuilder_.dispose(); + pointsBuilder_ = null; + points_ = other.points_; + bitField0_ = (bitField0_ & ~0x00000001); + pointsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? 
getPointsFieldBuilder() + : null; + } else { + pointsBuilder_.addAllMessages(other.points_); + } + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.TimeSeries parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.TimeSeries) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int bitField0_; + + private java.lang.Object metric_ = ""; + /** + * + * + *
+     * Required. The name of the metric.
+     * If the metric is not known by the service yet, it will be auto-created.
+     * 
+ * + * string metric = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The metric. + */ + public java.lang.String getMetric() { + java.lang.Object ref = metric_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + metric_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The name of the metric.
+     * If the metric is not known by the service yet, it will be auto-created.
+     * 
+ * + * string metric = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for metric. + */ + public com.google.protobuf.ByteString getMetricBytes() { + java.lang.Object ref = metric_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + metric_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The name of the metric.
+     * If the metric is not known by the service yet, it will be auto-created.
+     * 
+ * + * string metric = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @param value The metric to set. + * @return This builder for chaining. + */ + public Builder setMetric(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + metric_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the metric.
+     * If the metric is not known by the service yet, it will be auto-created.
+     * 
+ * + * string metric = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return This builder for chaining. + */ + public Builder clearMetric() { + + metric_ = getDefaultInstance().getMetric(); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the metric.
+     * If the metric is not known by the service yet, it will be auto-created.
+     * 
+ * + * string metric = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @param value The bytes for metric to set. + * @return This builder for chaining. + */ + public Builder setMetricBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + metric_ = value; + onChanged(); + return this; + } + + private int valueType_ = 0; + /** + * + * + *
+     * Required. The value type of the time series.
+     * 
+ * + * + * .google.api.MetricDescriptor.ValueType value_type = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The enum numeric value on the wire for valueType. + */ + @java.lang.Override + public int getValueTypeValue() { + return valueType_; + } + /** + * + * + *
+     * Required. The value type of the time series.
+     * 
+ * + * + * .google.api.MetricDescriptor.ValueType value_type = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @param value The enum numeric value on the wire for valueType to set. + * @return This builder for chaining. + */ + public Builder setValueTypeValue(int value) { + + valueType_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The value type of the time series.
+     * 
+ * + * + * .google.api.MetricDescriptor.ValueType value_type = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The valueType. + */ + @java.lang.Override + public com.google.api.MetricDescriptor.ValueType getValueType() { + @SuppressWarnings("deprecation") + com.google.api.MetricDescriptor.ValueType result = + com.google.api.MetricDescriptor.ValueType.valueOf(valueType_); + return result == null ? com.google.api.MetricDescriptor.ValueType.UNRECOGNIZED : result; + } + /** + * + * + *
+     * Required. The value type of the time series.
+     * 
+ * + * + * .google.api.MetricDescriptor.ValueType value_type = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @param value The valueType to set. + * @return This builder for chaining. + */ + public Builder setValueType(com.google.api.MetricDescriptor.ValueType value) { + if (value == null) { + throw new NullPointerException(); + } + + valueType_ = value.getNumber(); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The value type of the time series.
+     * 
+ * + * + * .google.api.MetricDescriptor.ValueType value_type = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return This builder for chaining. + */ + public Builder clearValueType() { + + valueType_ = 0; + onChanged(); + return this; + } + + private int metricKind_ = 0; + /** + * + * + *
+     * Optional. The metric kind of the time series.
+     * If present, it must be the same as the metric kind of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * this field specifies the metric kind of the new descriptor and must be
+     * either `GAUGE` (the default) or `CUMULATIVE`.
+     * 
+ * + * + * .google.api.MetricDescriptor.MetricKind metric_kind = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The enum numeric value on the wire for metricKind. + */ + @java.lang.Override + public int getMetricKindValue() { + return metricKind_; + } + /** + * + * + *
+     * Optional. The metric kind of the time series.
+     * If present, it must be the same as the metric kind of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * this field specifies the metric kind of the new descriptor and must be
+     * either `GAUGE` (the default) or `CUMULATIVE`.
+     * 
+ * + * + * .google.api.MetricDescriptor.MetricKind metric_kind = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @param value The enum numeric value on the wire for metricKind to set. + * @return This builder for chaining. + */ + public Builder setMetricKindValue(int value) { + + metricKind_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Optional. The metric kind of the time series.
+     * If present, it must be the same as the metric kind of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * this field specifies the metric kind of the new descriptor and must be
+     * either `GAUGE` (the default) or `CUMULATIVE`.
+     * 
+ * + * + * .google.api.MetricDescriptor.MetricKind metric_kind = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The metricKind. + */ + @java.lang.Override + public com.google.api.MetricDescriptor.MetricKind getMetricKind() { + @SuppressWarnings("deprecation") + com.google.api.MetricDescriptor.MetricKind result = + com.google.api.MetricDescriptor.MetricKind.valueOf(metricKind_); + return result == null ? com.google.api.MetricDescriptor.MetricKind.UNRECOGNIZED : result; + } + /** + * + * + *
+     * Optional. The metric kind of the time series.
+     * If present, it must be the same as the metric kind of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * this field specifies the metric kind of the new descriptor and must be
+     * either `GAUGE` (the default) or `CUMULATIVE`.
+     * 
+ * + * + * .google.api.MetricDescriptor.MetricKind metric_kind = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @param value The metricKind to set. + * @return This builder for chaining. + */ + public Builder setMetricKind(com.google.api.MetricDescriptor.MetricKind value) { + if (value == null) { + throw new NullPointerException(); + } + + metricKind_ = value.getNumber(); + onChanged(); + return this; + } + /** + * + * + *
+     * Optional. The metric kind of the time series.
+     * If present, it must be the same as the metric kind of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * this field specifies the metric kind of the new descriptor and must be
+     * either `GAUGE` (the default) or `CUMULATIVE`.
+     * 
+ * + * + * .google.api.MetricDescriptor.MetricKind metric_kind = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return This builder for chaining. + */ + public Builder clearMetricKind() { + + metricKind_ = 0; + onChanged(); + return this; + } + + private java.util.List points_ = + java.util.Collections.emptyList(); + + private void ensurePointsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + points_ = + new java.util.ArrayList(points_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.Point, + com.google.cloud.bigquery.migration.v2alpha.Point.Builder, + com.google.cloud.bigquery.migration.v2alpha.PointOrBuilder> + pointsBuilder_; + + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public java.util.List getPointsList() { + if (pointsBuilder_ == null) { + return java.util.Collections.unmodifiableList(points_); + } else { + return pointsBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public int getPointsCount() { + if (pointsBuilder_ == null) { + return points_.size(); + } else { + return pointsBuilder_.getCount(); + } + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.Point getPoints(int index) { + if (pointsBuilder_ == null) { + return points_.get(index); + } else { + return pointsBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setPoints(int index, com.google.cloud.bigquery.migration.v2alpha.Point value) { + if (pointsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePointsIsMutable(); + points_.set(index, value); + onChanged(); + } else { + pointsBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setPoints( + int index, com.google.cloud.bigquery.migration.v2alpha.Point.Builder builderForValue) { + if (pointsBuilder_ == null) { + ensurePointsIsMutable(); + points_.set(index, builderForValue.build()); + onChanged(); + } else { + pointsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder addPoints(com.google.cloud.bigquery.migration.v2alpha.Point value) { + if (pointsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePointsIsMutable(); + points_.add(value); + onChanged(); + } else { + pointsBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder addPoints(int index, com.google.cloud.bigquery.migration.v2alpha.Point value) { + if (pointsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePointsIsMutable(); + points_.add(index, value); + onChanged(); + } else { + pointsBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder addPoints( + com.google.cloud.bigquery.migration.v2alpha.Point.Builder builderForValue) { + if (pointsBuilder_ == null) { + ensurePointsIsMutable(); + points_.add(builderForValue.build()); + onChanged(); + } else { + pointsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder addPoints( + int index, com.google.cloud.bigquery.migration.v2alpha.Point.Builder builderForValue) { + if (pointsBuilder_ == null) { + ensurePointsIsMutable(); + points_.add(index, builderForValue.build()); + onChanged(); + } else { + pointsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder addAllPoints( + java.lang.Iterable values) { + if (pointsBuilder_ == null) { + ensurePointsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, points_); + onChanged(); + } else { + pointsBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder clearPoints() { + if (pointsBuilder_ == null) { + points_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + pointsBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder removePoints(int index) { + if (pointsBuilder_ == null) { + ensurePointsIsMutable(); + points_.remove(index); + onChanged(); + } else { + pointsBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.Point.Builder getPointsBuilder(int index) { + return getPointsFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.PointOrBuilder getPointsOrBuilder( + int index) { + if (pointsBuilder_ == null) { + return points_.get(index); + } else { + return pointsBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public java.util.List + getPointsOrBuilderList() { + if (pointsBuilder_ != null) { + return pointsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(points_); + } + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.Point.Builder addPointsBuilder() { + return getPointsFieldBuilder() + .addBuilder(com.google.cloud.bigquery.migration.v2alpha.Point.getDefaultInstance()); + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.migration.v2alpha.Point.Builder addPointsBuilder(int index) { + return getPointsFieldBuilder() + .addBuilder( + index, com.google.cloud.bigquery.migration.v2alpha.Point.getDefaultInstance()); + } + /** + * + * + *
+     * Required. The data points of this time series. When listing time series, points are
+     * returned in reverse time order.
+     * When creating a time series, this field must contain exactly one point and
+     * the point's type must be the same as the value type of the associated
+     * metric. If the associated metric's descriptor must be auto-created, then
+     * the value type of the descriptor is determined by the point's type, which
+     * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+     * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public java.util.List + getPointsBuilderList() { + return getPointsFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.Point, + com.google.cloud.bigquery.migration.v2alpha.Point.Builder, + com.google.cloud.bigquery.migration.v2alpha.PointOrBuilder> + getPointsFieldBuilder() { + if (pointsBuilder_ == null) { + pointsBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.migration.v2alpha.Point, + com.google.cloud.bigquery.migration.v2alpha.Point.Builder, + com.google.cloud.bigquery.migration.v2alpha.PointOrBuilder>( + points_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); + points_ = null; + } + return pointsBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.TimeSeries) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.TimeSeries) + private static final com.google.cloud.bigquery.migration.v2alpha.TimeSeries DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.TimeSeries(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TimeSeries getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public TimeSeries parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TimeSeries(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TimeSeries getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TimeSeriesOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TimeSeriesOrBuilder.java new file mode 100644 index 0000000..91b6e84 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TimeSeriesOrBuilder.java @@ -0,0 +1,210 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/cloud/bigquery/migration/v2alpha/migration_metrics.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface TimeSeriesOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.TimeSeries) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The name of the metric.
+   * If the metric is not known by the service yet, it will be auto-created.
+   * 
+ * + * string metric = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The metric. + */ + java.lang.String getMetric(); + /** + * + * + *
+   * Required. The name of the metric.
+   * If the metric is not known by the service yet, it will be auto-created.
+   * 
+ * + * string metric = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for metric. + */ + com.google.protobuf.ByteString getMetricBytes(); + + /** + * + * + *
+   * Required. The value type of the time series.
+   * 
+ * + * + * .google.api.MetricDescriptor.ValueType value_type = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The enum numeric value on the wire for valueType. + */ + int getValueTypeValue(); + /** + * + * + *
+   * Required. The value type of the time series.
+   * 
+ * + * + * .google.api.MetricDescriptor.ValueType value_type = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The valueType. + */ + com.google.api.MetricDescriptor.ValueType getValueType(); + + /** + * + * + *
+   * Optional. The metric kind of the time series.
+   * If present, it must be the same as the metric kind of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * this field specifies the metric kind of the new descriptor and must be
+   * either `GAUGE` (the default) or `CUMULATIVE`.
+   * 
+ * + * + * .google.api.MetricDescriptor.MetricKind metric_kind = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The enum numeric value on the wire for metricKind. + */ + int getMetricKindValue(); + /** + * + * + *
+   * Optional. The metric kind of the time series.
+   * If present, it must be the same as the metric kind of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * this field specifies the metric kind of the new descriptor and must be
+   * either `GAUGE` (the default) or `CUMULATIVE`.
+   * 
+ * + * + * .google.api.MetricDescriptor.MetricKind metric_kind = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The metricKind. + */ + com.google.api.MetricDescriptor.MetricKind getMetricKind(); + + /** + * + * + *
+   * Required. The data points of this time series. When listing time series, points are
+   * returned in reverse time order.
+   * When creating a time series, this field must contain exactly one point and
+   * the point's type must be the same as the value type of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * the value type of the descriptor is determined by the point's type, which
+   * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + java.util.List getPointsList(); + /** + * + * + *
+   * Required. The data points of this time series. When listing time series, points are
+   * returned in reverse time order.
+   * When creating a time series, this field must contain exactly one point and
+   * the point's type must be the same as the value type of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * the value type of the descriptor is determined by the point's type, which
+   * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.bigquery.migration.v2alpha.Point getPoints(int index); + /** + * + * + *
+   * Required. The data points of this time series. When listing time series, points are
+   * returned in reverse time order.
+   * When creating a time series, this field must contain exactly one point and
+   * the point's type must be the same as the value type of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * the value type of the descriptor is determined by the point's type, which
+   * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + int getPointsCount(); + /** + * + * + *
+   * Required. The data points of this time series. When listing time series, points are
+   * returned in reverse time order.
+   * When creating a time series, this field must contain exactly one point and
+   * the point's type must be the same as the value type of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * the value type of the descriptor is determined by the point's type, which
+   * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + java.util.List + getPointsOrBuilderList(); + /** + * + * + *
+   * Required. The data points of this time series. When listing time series, points are
+   * returned in reverse time order.
+   * When creating a time series, this field must contain exactly one point and
+   * the point's type must be the same as the value type of the associated
+   * metric. If the associated metric's descriptor must be auto-created, then
+   * the value type of the descriptor is determined by the point's type, which
+   * must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+   * 
+ * + * + * repeated .google.cloud.bigquery.migration.v2alpha.Point points = 4 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.bigquery.migration.v2alpha.PointOrBuilder getPointsOrBuilder(int index); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TypedValue.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TypedValue.java new file mode 100644 index 0000000..ecc34f3 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TypedValue.java @@ -0,0 +1,1490 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_metrics.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +/** + * + * + *
+ * A single strongly-typed value.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.TypedValue} + */ +public final class TypedValue extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.TypedValue) + TypedValueOrBuilder { + private static final long serialVersionUID = 0L; + // Use TypedValue.newBuilder() to construct. + private TypedValue(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private TypedValue() {} + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new TypedValue(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private TypedValue( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: + { + valueCase_ = 1; + value_ = input.readBool(); + break; + } + case 16: + { + valueCase_ = 2; + value_ = input.readInt64(); + break; + } + case 25: + { + valueCase_ = 3; + value_ = input.readDouble(); + break; + } + case 34: + { + java.lang.String s = input.readStringRequireUtf8(); + valueCase_ = 4; + value_ = s; + break; + } + case 42: + { + com.google.api.Distribution.Builder subBuilder = null; + if (valueCase_ == 5) { + subBuilder = ((com.google.api.Distribution) value_).toBuilder(); + } + value_ = input.readMessage(com.google.api.Distribution.parser(), extensionRegistry); + if (subBuilder != null) { + 
subBuilder.mergeFrom((com.google.api.Distribution) value_); + value_ = subBuilder.buildPartial(); + } + valueCase_ = 5; + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TypedValue_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TypedValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.TypedValue.class, + com.google.cloud.bigquery.migration.v2alpha.TypedValue.Builder.class); + } + + private int valueCase_ = 0; + private java.lang.Object value_; + + public enum ValueCase + implements + com.google.protobuf.Internal.EnumLite, + com.google.protobuf.AbstractMessage.InternalOneOfEnum { + BOOL_VALUE(1), + INT64_VALUE(2), + DOUBLE_VALUE(3), + STRING_VALUE(4), + DISTRIBUTION_VALUE(5), + VALUE_NOT_SET(0); + private final int value; + + private ValueCase(int value) { + this.value = value; + } + /** + * @param value The number of the enum to look for. + * @return The enum associated with the given number. + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated + public static ValueCase valueOf(int value) { + return forNumber(value); + } + + public static ValueCase forNumber(int value) { + switch (value) { + case 1: + return BOOL_VALUE; + case 2: + return INT64_VALUE; + case 3: + return DOUBLE_VALUE; + case 4: + return STRING_VALUE; + case 5: + return DISTRIBUTION_VALUE; + case 0: + return VALUE_NOT_SET; + default: + return null; + } + } + + public int getNumber() { + return this.value; + } + }; + + public ValueCase getValueCase() { + return ValueCase.forNumber(valueCase_); + } + + public static final int BOOL_VALUE_FIELD_NUMBER = 1; + /** + * + * + *
+   * A Boolean value: `true` or `false`.
+   * 
+ * + * bool bool_value = 1; + * + * @return Whether the boolValue field is set. + */ + @java.lang.Override + public boolean hasBoolValue() { + return valueCase_ == 1; + } + /** + * + * + *
+   * A Boolean value: `true` or `false`.
+   * 
+ * + * bool bool_value = 1; + * + * @return The boolValue. + */ + @java.lang.Override + public boolean getBoolValue() { + if (valueCase_ == 1) { + return (java.lang.Boolean) value_; + } + return false; + } + + public static final int INT64_VALUE_FIELD_NUMBER = 2; + /** + * + * + *
+   * A 64-bit integer. Its range is approximately +/-9.2x10^18.
+   * 
+ * + * int64 int64_value = 2; + * + * @return Whether the int64Value field is set. + */ + @java.lang.Override + public boolean hasInt64Value() { + return valueCase_ == 2; + } + /** + * + * + *
+   * A 64-bit integer. Its range is approximately +/-9.2x10^18.
+   * 
+ * + * int64 int64_value = 2; + * + * @return The int64Value. + */ + @java.lang.Override + public long getInt64Value() { + if (valueCase_ == 2) { + return (java.lang.Long) value_; + } + return 0L; + } + + public static final int DOUBLE_VALUE_FIELD_NUMBER = 3; + /** + * + * + *
+   * A 64-bit double-precision floating-point number. Its magnitude
+   * is approximately +/-10^(+/-300) and it has 16 significant digits of
+   * precision.
+   * 
+ * + * double double_value = 3; + * + * @return Whether the doubleValue field is set. + */ + @java.lang.Override + public boolean hasDoubleValue() { + return valueCase_ == 3; + } + /** + * + * + *
+   * A 64-bit double-precision floating-point number. Its magnitude
+   * is approximately +/-10^(+/-300) and it has 16 significant digits of
+   * precision.
+   * 
+ * + * double double_value = 3; + * + * @return The doubleValue. + */ + @java.lang.Override + public double getDoubleValue() { + if (valueCase_ == 3) { + return (java.lang.Double) value_; + } + return 0D; + } + + public static final int STRING_VALUE_FIELD_NUMBER = 4; + /** + * + * + *
+   * A variable-length string value.
+   * 
+ * + * string string_value = 4; + * + * @return Whether the stringValue field is set. + */ + public boolean hasStringValue() { + return valueCase_ == 4; + } + /** + * + * + *
+   * A variable-length string value.
+   * 
+ * + * string string_value = 4; + * + * @return The stringValue. + */ + public java.lang.String getStringValue() { + java.lang.Object ref = ""; + if (valueCase_ == 4) { + ref = value_; + } + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (valueCase_ == 4) { + value_ = s; + } + return s; + } + } + /** + * + * + *
+   * A variable-length string value.
+   * 
+ * + * string string_value = 4; + * + * @return The bytes for stringValue. + */ + public com.google.protobuf.ByteString getStringValueBytes() { + java.lang.Object ref = ""; + if (valueCase_ == 4) { + ref = value_; + } + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + if (valueCase_ == 4) { + value_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DISTRIBUTION_VALUE_FIELD_NUMBER = 5; + /** + * + * + *
+   * A distribution value.
+   * 
+ * + * .google.api.Distribution distribution_value = 5; + * + * @return Whether the distributionValue field is set. + */ + @java.lang.Override + public boolean hasDistributionValue() { + return valueCase_ == 5; + } + /** + * + * + *
+   * A distribution value.
+   * 
+ * + * .google.api.Distribution distribution_value = 5; + * + * @return The distributionValue. + */ + @java.lang.Override + public com.google.api.Distribution getDistributionValue() { + if (valueCase_ == 5) { + return (com.google.api.Distribution) value_; + } + return com.google.api.Distribution.getDefaultInstance(); + } + /** + * + * + *
+   * A distribution value.
+   * 
+ * + * .google.api.Distribution distribution_value = 5; + */ + @java.lang.Override + public com.google.api.DistributionOrBuilder getDistributionValueOrBuilder() { + if (valueCase_ == 5) { + return (com.google.api.Distribution) value_; + } + return com.google.api.Distribution.getDefaultInstance(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (valueCase_ == 1) { + output.writeBool(1, (boolean) ((java.lang.Boolean) value_)); + } + if (valueCase_ == 2) { + output.writeInt64(2, (long) ((java.lang.Long) value_)); + } + if (valueCase_ == 3) { + output.writeDouble(3, (double) ((java.lang.Double) value_)); + } + if (valueCase_ == 4) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, value_); + } + if (valueCase_ == 5) { + output.writeMessage(5, (com.google.api.Distribution) value_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (valueCase_ == 1) { + size += + com.google.protobuf.CodedOutputStream.computeBoolSize( + 1, (boolean) ((java.lang.Boolean) value_)); + } + if (valueCase_ == 2) { + size += + com.google.protobuf.CodedOutputStream.computeInt64Size( + 2, (long) ((java.lang.Long) value_)); + } + if (valueCase_ == 3) { + size += + com.google.protobuf.CodedOutputStream.computeDoubleSize( + 3, (double) ((java.lang.Double) value_)); + } + if (valueCase_ == 4) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, value_); + } + if (valueCase_ == 5) { + size += + com.google.protobuf.CodedOutputStream.computeMessageSize( + 5, 
(com.google.api.Distribution) value_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.TypedValue)) { + return super.equals(obj); + } + com.google.cloud.bigquery.migration.v2alpha.TypedValue other = + (com.google.cloud.bigquery.migration.v2alpha.TypedValue) obj; + + if (!getValueCase().equals(other.getValueCase())) return false; + switch (valueCase_) { + case 1: + if (getBoolValue() != other.getBoolValue()) return false; + break; + case 2: + if (getInt64Value() != other.getInt64Value()) return false; + break; + case 3: + if (java.lang.Double.doubleToLongBits(getDoubleValue()) + != java.lang.Double.doubleToLongBits(other.getDoubleValue())) return false; + break; + case 4: + if (!getStringValue().equals(other.getStringValue())) return false; + break; + case 5: + if (!getDistributionValue().equals(other.getDistributionValue())) return false; + break; + case 0: + default: + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + switch (valueCase_) { + case 1: + hash = (37 * hash) + BOOL_VALUE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getBoolValue()); + break; + case 2: + hash = (37 * hash) + INT64_VALUE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getInt64Value()); + break; + case 3: + hash = (37 * hash) + DOUBLE_VALUE_FIELD_NUMBER; + hash = + (53 * hash) + + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getDoubleValue())); + break; + case 4: + hash = (37 * hash) + STRING_VALUE_FIELD_NUMBER; + hash = (53 * hash) + getStringValue().hashCode(); + 
break; + case 5: + hash = (37 * hash) + DISTRIBUTION_VALUE_FIELD_NUMBER; + hash = (53 * hash) + getDistributionValue().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.migration.v2alpha.TypedValue parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TypedValue parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TypedValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TypedValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TypedValue parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TypedValue parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TypedValue parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + 
public static com.google.cloud.bigquery.migration.v2alpha.TypedValue parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TypedValue parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TypedValue parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TypedValue parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TypedValue parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.migration.v2alpha.TypedValue prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * A single strongly-typed value.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.migration.v2alpha.TypedValue} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.TypedValue) + com.google.cloud.bigquery.migration.v2alpha.TypedValueOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TypedValue_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TypedValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.migration.v2alpha.TypedValue.class, + com.google.cloud.bigquery.migration.v2alpha.TypedValue.Builder.class); + } + + // Construct using com.google.cloud.bigquery.migration.v2alpha.TypedValue.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + valueCase_ = 0; + value_ = null; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.migration.v2alpha.MigrationMetricsProto + .internal_static_google_cloud_bigquery_migration_v2alpha_TypedValue_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TypedValue getDefaultInstanceForType() { 
+ return com.google.cloud.bigquery.migration.v2alpha.TypedValue.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TypedValue build() { + com.google.cloud.bigquery.migration.v2alpha.TypedValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TypedValue buildPartial() { + com.google.cloud.bigquery.migration.v2alpha.TypedValue result = + new com.google.cloud.bigquery.migration.v2alpha.TypedValue(this); + if (valueCase_ == 1) { + result.value_ = value_; + } + if (valueCase_ == 2) { + result.value_ = value_; + } + if (valueCase_ == 3) { + result.value_ = value_; + } + if (valueCase_ == 4) { + result.value_ = value_; + } + if (valueCase_ == 5) { + if (distributionValueBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = distributionValueBuilder_.build(); + } + } + result.valueCase_ = valueCase_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return 
super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.migration.v2alpha.TypedValue) { + return mergeFrom((com.google.cloud.bigquery.migration.v2alpha.TypedValue) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.migration.v2alpha.TypedValue other) { + if (other == com.google.cloud.bigquery.migration.v2alpha.TypedValue.getDefaultInstance()) + return this; + switch (other.getValueCase()) { + case BOOL_VALUE: + { + setBoolValue(other.getBoolValue()); + break; + } + case INT64_VALUE: + { + setInt64Value(other.getInt64Value()); + break; + } + case DOUBLE_VALUE: + { + setDoubleValue(other.getDoubleValue()); + break; + } + case STRING_VALUE: + { + valueCase_ = 4; + value_ = other.value_; + onChanged(); + break; + } + case DISTRIBUTION_VALUE: + { + mergeDistributionValue(other.getDistributionValue()); + break; + } + case VALUE_NOT_SET: + { + break; + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.migration.v2alpha.TypedValue parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.migration.v2alpha.TypedValue) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int valueCase_ = 0; + private java.lang.Object value_; + + public ValueCase getValueCase() { + return 
ValueCase.forNumber(valueCase_); + } + + public Builder clearValue() { + valueCase_ = 0; + value_ = null; + onChanged(); + return this; + } + + /** + * + * + *
+     * A Boolean value: `true` or `false`.
+     * 
+ * + * bool bool_value = 1; + * + * @return Whether the boolValue field is set. + */ + public boolean hasBoolValue() { + return valueCase_ == 1; + } + /** + * + * + *
+     * A Boolean value: `true` or `false`.
+     * 
+ * + * bool bool_value = 1; + * + * @return The boolValue. + */ + public boolean getBoolValue() { + if (valueCase_ == 1) { + return (java.lang.Boolean) value_; + } + return false; + } + /** + * + * + *
+     * A Boolean value: `true` or `false`.
+     * 
+ * + * bool bool_value = 1; + * + * @param value The boolValue to set. + * @return This builder for chaining. + */ + public Builder setBoolValue(boolean value) { + valueCase_ = 1; + value_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * A Boolean value: `true` or `false`.
+     * 
+ * + * bool bool_value = 1; + * + * @return This builder for chaining. + */ + public Builder clearBoolValue() { + if (valueCase_ == 1) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + return this; + } + + /** + * + * + *
+     * A 64-bit integer. Its range is approximately +/-9.2x10^18.
+     * 
+ * + * int64 int64_value = 2; + * + * @return Whether the int64Value field is set. + */ + public boolean hasInt64Value() { + return valueCase_ == 2; + } + /** + * + * + *
+     * A 64-bit integer. Its range is approximately +/-9.2x10^18.
+     * 
+ * + * int64 int64_value = 2; + * + * @return The int64Value. + */ + public long getInt64Value() { + if (valueCase_ == 2) { + return (java.lang.Long) value_; + } + return 0L; + } + /** + * + * + *
+     * A 64-bit integer. Its range is approximately +/-9.2x10^18.
+     * 
+ * + * int64 int64_value = 2; + * + * @param value The int64Value to set. + * @return This builder for chaining. + */ + public Builder setInt64Value(long value) { + valueCase_ = 2; + value_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * A 64-bit integer. Its range is approximately +/-9.2x10^18.
+     * 
+ * + * int64 int64_value = 2; + * + * @return This builder for chaining. + */ + public Builder clearInt64Value() { + if (valueCase_ == 2) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + return this; + } + + /** + * + * + *
+     * A 64-bit double-precision floating-point number. Its magnitude
+     * is approximately +/-10^(+/-300) and it has 16 significant digits of
+     * precision.
+     * 
+ * + * double double_value = 3; + * + * @return Whether the doubleValue field is set. + */ + public boolean hasDoubleValue() { + return valueCase_ == 3; + } + /** + * + * + *
+     * A 64-bit double-precision floating-point number. Its magnitude
+     * is approximately +/-10^(+/-300) and it has 16 significant digits of
+     * precision.
+     * 
+ * + * double double_value = 3; + * + * @return The doubleValue. + */ + public double getDoubleValue() { + if (valueCase_ == 3) { + return (java.lang.Double) value_; + } + return 0D; + } + /** + * + * + *
+     * A 64-bit double-precision floating-point number. Its magnitude
+     * is approximately +/-10^(+/-300) and it has 16 significant digits of
+     * precision.
+     * 
+ * + * double double_value = 3; + * + * @param value The doubleValue to set. + * @return This builder for chaining. + */ + public Builder setDoubleValue(double value) { + valueCase_ = 3; + value_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * A 64-bit double-precision floating-point number. Its magnitude
+     * is approximately +/-10^(+/-300) and it has 16 significant digits of
+     * precision.
+     * 
+ * + * double double_value = 3; + * + * @return This builder for chaining. + */ + public Builder clearDoubleValue() { + if (valueCase_ == 3) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + return this; + } + + /** + * + * + *
+     * A variable-length string value.
+     * 
+ * + * string string_value = 4; + * + * @return Whether the stringValue field is set. + */ + @java.lang.Override + public boolean hasStringValue() { + return valueCase_ == 4; + } + /** + * + * + *
+     * A variable-length string value.
+     * 
+ * + * string string_value = 4; + * + * @return The stringValue. + */ + @java.lang.Override + public java.lang.String getStringValue() { + java.lang.Object ref = ""; + if (valueCase_ == 4) { + ref = value_; + } + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (valueCase_ == 4) { + value_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * A variable-length string value.
+     * 
+ * + * string string_value = 4; + * + * @return The bytes for stringValue. + */ + @java.lang.Override + public com.google.protobuf.ByteString getStringValueBytes() { + java.lang.Object ref = ""; + if (valueCase_ == 4) { + ref = value_; + } + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + if (valueCase_ == 4) { + value_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * A variable-length string value.
+     * 
+ * + * string string_value = 4; + * + * @param value The stringValue to set. + * @return This builder for chaining. + */ + public Builder setStringValue(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + valueCase_ = 4; + value_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * A variable-length string value.
+     * 
+ * + * string string_value = 4; + * + * @return This builder for chaining. + */ + public Builder clearStringValue() { + if (valueCase_ == 4) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + return this; + } + /** + * + * + *
+     * A variable-length string value.
+     * 
+ * + * string string_value = 4; + * + * @param value The bytes for stringValue to set. + * @return This builder for chaining. + */ + public Builder setStringValueBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + valueCase_ = 4; + value_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.api.Distribution, + com.google.api.Distribution.Builder, + com.google.api.DistributionOrBuilder> + distributionValueBuilder_; + /** + * + * + *
+     * A distribution value.
+     * 
+ * + * .google.api.Distribution distribution_value = 5; + * + * @return Whether the distributionValue field is set. + */ + @java.lang.Override + public boolean hasDistributionValue() { + return valueCase_ == 5; + } + /** + * + * + *
+     * A distribution value.
+     * 
+ * + * .google.api.Distribution distribution_value = 5; + * + * @return The distributionValue. + */ + @java.lang.Override + public com.google.api.Distribution getDistributionValue() { + if (distributionValueBuilder_ == null) { + if (valueCase_ == 5) { + return (com.google.api.Distribution) value_; + } + return com.google.api.Distribution.getDefaultInstance(); + } else { + if (valueCase_ == 5) { + return distributionValueBuilder_.getMessage(); + } + return com.google.api.Distribution.getDefaultInstance(); + } + } + /** + * + * + *
+     * A distribution value.
+     * 
+ * + * .google.api.Distribution distribution_value = 5; + */ + public Builder setDistributionValue(com.google.api.Distribution value) { + if (distributionValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + distributionValueBuilder_.setMessage(value); + } + valueCase_ = 5; + return this; + } + /** + * + * + *
+     * A distribution value.
+     * 
+ * + * .google.api.Distribution distribution_value = 5; + */ + public Builder setDistributionValue(com.google.api.Distribution.Builder builderForValue) { + if (distributionValueBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + distributionValueBuilder_.setMessage(builderForValue.build()); + } + valueCase_ = 5; + return this; + } + /** + * + * + *
+     * A distribution value.
+     * 
+ * + * .google.api.Distribution distribution_value = 5; + */ + public Builder mergeDistributionValue(com.google.api.Distribution value) { + if (distributionValueBuilder_ == null) { + if (valueCase_ == 5 && value_ != com.google.api.Distribution.getDefaultInstance()) { + value_ = + com.google.api.Distribution.newBuilder((com.google.api.Distribution) value_) + .mergeFrom(value) + .buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + if (valueCase_ == 5) { + distributionValueBuilder_.mergeFrom(value); + } + distributionValueBuilder_.setMessage(value); + } + valueCase_ = 5; + return this; + } + /** + * + * + *
+     * A distribution value.
+     * 
+ * + * .google.api.Distribution distribution_value = 5; + */ + public Builder clearDistributionValue() { + if (distributionValueBuilder_ == null) { + if (valueCase_ == 5) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + } else { + if (valueCase_ == 5) { + valueCase_ = 0; + value_ = null; + } + distributionValueBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * A distribution value.
+     * 
+ * + * .google.api.Distribution distribution_value = 5; + */ + public com.google.api.Distribution.Builder getDistributionValueBuilder() { + return getDistributionValueFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * A distribution value.
+     * 
+ * + * .google.api.Distribution distribution_value = 5; + */ + @java.lang.Override + public com.google.api.DistributionOrBuilder getDistributionValueOrBuilder() { + if ((valueCase_ == 5) && (distributionValueBuilder_ != null)) { + return distributionValueBuilder_.getMessageOrBuilder(); + } else { + if (valueCase_ == 5) { + return (com.google.api.Distribution) value_; + } + return com.google.api.Distribution.getDefaultInstance(); + } + } + /** + * + * + *
+     * A distribution value.
+     * 
+ * + * .google.api.Distribution distribution_value = 5; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.api.Distribution, + com.google.api.Distribution.Builder, + com.google.api.DistributionOrBuilder> + getDistributionValueFieldBuilder() { + if (distributionValueBuilder_ == null) { + if (!(valueCase_ == 5)) { + value_ = com.google.api.Distribution.getDefaultInstance(); + } + distributionValueBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.api.Distribution, + com.google.api.Distribution.Builder, + com.google.api.DistributionOrBuilder>( + (com.google.api.Distribution) value_, getParentForChildren(), isClean()); + value_ = null; + } + valueCase_ = 5; + onChanged(); + ; + return distributionValueBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.TypedValue) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.TypedValue) + private static final com.google.cloud.bigquery.migration.v2alpha.TypedValue DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.TypedValue(); + } + + public static com.google.cloud.bigquery.migration.v2alpha.TypedValue getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public TypedValue parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new 
TypedValue(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.migration.v2alpha.TypedValue getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TypedValueOrBuilder.java b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TypedValueOrBuilder.java new file mode 100644 index 0000000..656d1f7 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/TypedValueOrBuilder.java @@ -0,0 +1,178 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/migration/v2alpha/migration_metrics.proto + +package com.google.cloud.bigquery.migration.v2alpha; + +public interface TypedValueOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.migration.v2alpha.TypedValue) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * A Boolean value: `true` or `false`.
+   * 
+ * + * bool bool_value = 1; + * + * @return Whether the boolValue field is set. + */ + boolean hasBoolValue(); + /** + * + * + *
+   * A Boolean value: `true` or `false`.
+   * 
+ * + * bool bool_value = 1; + * + * @return The boolValue. + */ + boolean getBoolValue(); + + /** + * + * + *
+   * A 64-bit integer. Its range is approximately +/-9.2x10^18.
+   * 
+ * + * int64 int64_value = 2; + * + * @return Whether the int64Value field is set. + */ + boolean hasInt64Value(); + /** + * + * + *
+   * A 64-bit integer. Its range is approximately +/-9.2x10^18.
+   * 
+ * + * int64 int64_value = 2; + * + * @return The int64Value. + */ + long getInt64Value(); + + /** + * + * + *
+   * A 64-bit double-precision floating-point number. Its magnitude
+   * is approximately +/-10^(+/-300) and it has 16 significant digits of
+   * precision.
+   * 
+ * + * double double_value = 3; + * + * @return Whether the doubleValue field is set. + */ + boolean hasDoubleValue(); + /** + * + * + *
+   * A 64-bit double-precision floating-point number. Its magnitude
+   * is approximately +/-10^(+/-300) and it has 16 significant digits of
+   * precision.
+   * 
+ * + * double double_value = 3; + * + * @return The doubleValue. + */ + double getDoubleValue(); + + /** + * + * + *
+   * A variable-length string value.
+   * 
+ * + * string string_value = 4; + * + * @return Whether the stringValue field is set. + */ + boolean hasStringValue(); + /** + * + * + *
+   * A variable-length string value.
+   * 
+ * + * string string_value = 4; + * + * @return The stringValue. + */ + java.lang.String getStringValue(); + /** + * + * + *
+   * A variable-length string value.
+   * 
+ * + * string string_value = 4; + * + * @return The bytes for stringValue. + */ + com.google.protobuf.ByteString getStringValueBytes(); + + /** + * + * + *
+   * A distribution value.
+   * 
+ * + * .google.api.Distribution distribution_value = 5; + * + * @return Whether the distributionValue field is set. + */ + boolean hasDistributionValue(); + /** + * + * + *
+   * A distribution value.
+   * 
+ * + * .google.api.Distribution distribution_value = 5; + * + * @return The distributionValue. + */ + com.google.api.Distribution getDistributionValue(); + /** + * + * + *
+   * A distribution value.
+   * 
+ * + * .google.api.Distribution distribution_value = 5; + */ + com.google.api.DistributionOrBuilder getDistributionValueOrBuilder(); + + public com.google.cloud.bigquery.migration.v2alpha.TypedValue.ValueCase getValueCase(); +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/proto/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/proto-google-cloud-bigquerymigration-v2alpha/src/main/proto/google/cloud/bigquery/migration/v2alpha/migration_entities.proto new file mode 100644 index 0000000..13c7d80 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/proto/google/cloud/bigquery/migration/v2alpha/migration_entities.proto @@ -0,0 +1,219 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. The ID is + // server-generated. 
+ // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map tasks = 2; + + // Output only. That status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. its subtasks are ready for execution. + RUNNING = 3; + + // Tha task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. + FAILED = 6; + } + + // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be a supported task type. + string type = 2; + + // The details of the task. The type URL must be one of the supported task + // details messages and correspond to the Task's type. 
+ google.protobuf.Any details = 3; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned,it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. 
The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Provides details to errors and issues encountered while processing the + // subtask. Presence of error details does not mean that the subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number or resources with errors. Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. 
+ repeated TimeSeries metrics = 11; +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/proto/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/proto-google-cloud-bigquerymigration-v2alpha/src/main/proto/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto new file mode 100644 index 0000000..89dac5e --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/proto/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. 
The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that were encountered when +// processing a subtask. +message ErrorDetail { + // Optional. The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. + google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no column information. + int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/proto/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/proto-google-cloud-bigquerymigration-v2alpha/src/main/proto/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto new file mode 100644 index 0000000..ce60dd2 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/proto/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. 
When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. + repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. 
+ oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately +/-9.2x10^18. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately +/-10^(+/-300) and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/proto-google-cloud-bigquerymigration-v2alpha/src/main/proto/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/proto-google-cloud-bigquerymigration-v2alpha/src/main/proto/google/cloud/bigquery/migration/v2alpha/migration_service.proto new file mode 100644 index 0000000..b5e9014 --- /dev/null +++ b/proto-google-cloud-bigquerymigration-v2alpha/src/main/proto/google/cloud/bigquery/migration/v2alpha/migration_service.proto @@ -0,0 +1,248 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Service to handle EDW migrations. +service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2alpha/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflows.
+ rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name. + rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. + rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. 
+ // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. 
+ repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtasks. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. + // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. +message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. 
The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // task in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages.
+ string next_page_token = 2; +} diff --git a/renovate.json b/renovate.json new file mode 100644 index 0000000..3baafbb --- /dev/null +++ b/renovate.json @@ -0,0 +1,74 @@ +{ + "extends": [ + ":separateMajorReleases", + ":combinePatchMinorReleases", + ":ignoreUnstable", + ":prImmediately", + ":updateNotScheduled", + ":automergeDisabled", + ":ignoreModulesAndTests", + ":maintainLockFilesDisabled", + ":autodetectPinVersions" + ], + "packageRules": [ + { + "packagePatterns": [ + "^com.google.guava:" + ], + "versionScheme": "docker" + }, + { + "packagePatterns": [ + "*" + ], + "semanticCommitType": "deps", + "semanticCommitScope": null + }, + { + "packagePatterns": [ + "^org.apache.maven", + "^org.jacoco:", + "^org.codehaus.mojo:", + "^org.sonatype.plugins:", + "^com.coveo:", + "^com.google.cloud:google-cloud-shared-config" + ], + "semanticCommitType": "build", + "semanticCommitScope": "deps" + }, + { + "packagePatterns": [ + "^com.google.cloud:google-cloud-bigquerymigration", + "^com.google.cloud:libraries-bom", + "^com.google.cloud.samples:shared-configuration" + ], + "semanticCommitType": "chore", + "semanticCommitScope": "deps" + }, + { + "packagePatterns": [ + "^junit:junit", + "^com.google.truth:truth", + "^org.mockito:mockito-core", + "^org.objenesis:objenesis", + "^com.google.cloud:google-cloud-conformance-tests" + ], + "semanticCommitType": "test", + "semanticCommitScope": "deps" + }, + { + "packagePatterns": [ + "^com.google.cloud:google-cloud-" + ], + "ignoreUnstable": false + }, + { + "packagePatterns": [ + "^com.fasterxml.jackson.core" + ], + "groupName": "jackson dependencies" + } + ], + "semanticCommits": true, + "dependencyDashboard": true +} diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml new file mode 100644 index 0000000..0a458d4 --- /dev/null +++ b/samples/install-without-bom/pom.xml @@ -0,0 +1,84 @@ + + + 4.0.0 + com.google.cloud + bigquerymigration-install-without-bom + jar + Google BigQuery Migration Install 
Without Bom + https://github.com/googleapis/java-bigquerymigration + + + + com.google.cloud.samples + shared-configuration + 1.0.12 + + + + 1.8 + 1.8 + UTF-8 + + + + + + + com.google.cloud + google-cloud-bigquerymigration + 0.0.0 + + + + + junit + junit + 4.13 + test + + + com.google.truth + truth + 1.0.1 + test + + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.1.0 + + + add-snippets-source + + add-source + + + + ../snippets/src/main/java + + + + + add-snippets-tests + + add-test-source + + + + ../snippets/src/test/java + + + + + + + + diff --git a/samples/pom.xml b/samples/pom.xml new file mode 100644 index 0000000..1a17d37 --- /dev/null +++ b/samples/pom.xml @@ -0,0 +1,56 @@ + + + 4.0.0 + com.google.cloud + google-cloud-bigquerymigration-samples + 0.0.1-SNAPSHOT + pom + Google BigQuery Migration Samples Parent + https://github.com/googleapis/java-bigquerymigration + + Java idiomatic client for Google Cloud Platform services. + + + + + com.google.cloud.samples + shared-configuration + 1.0.18 + + + + 1.8 + 1.8 + UTF-8 + + + + install-without-bom + snapshot + snippets + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + true + + + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + + true + + + + + diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml new file mode 100644 index 0000000..1fd8e7f --- /dev/null +++ b/samples/snapshot/pom.xml @@ -0,0 +1,83 @@ + + + 4.0.0 + com.google.cloud + bigquerymigration-snapshot + jar + Google BigQuery Migration Snapshot Samples + https://github.com/googleapis/java-bigquerymigration + + + + com.google.cloud.samples + shared-configuration + 1.0.12 + + + + 1.8 + 1.8 + UTF-8 + + + + + + com.google.cloud + google-cloud-bigquerymigration + 0.0.0 + + + + + junit + junit + 4.13 + test + + + com.google.truth + truth + 1.0.1 + test + + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.1.0 + + + add-snippets-source + + add-source + + + + ../snippets/src/main/java + + + + + 
add-snippets-tests + + add-test-source + + + + ../snippets/src/test/java + + + + + + + + \ No newline at end of file diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml new file mode 100644 index 0000000..99016dd --- /dev/null +++ b/samples/snippets/pom.xml @@ -0,0 +1,47 @@ + + + 4.0.0 + com.google.cloud + bigquerymigration-snippets + jar + Google BigQuery Migration Snippets + https://github.com/googleapis/java-bigquerymigration + + + + com.google.cloud.samples + shared-configuration + 1.0.12 + + + + 1.8 + 1.8 + UTF-8 + + + + + + com.google.cloud + google-cloud-bigquerymigration + 0.0.0 + + + + junit + junit + 4.13 + test + + + com.google.truth + truth + 1.0.1 + test + + + diff --git a/versions.txt b/versions.txt new file mode 100644 index 0000000..4dc78e8 --- /dev/null +++ b/versions.txt @@ -0,0 +1,6 @@ +# Format: +# module:released-version:current-version + +google-cloud-bigquerymigration:0.0.0:0.0.1-SNAPSHOT +grpc-google-cloud-bigquerymigration-v2alpha:0.0.0:0.0.1-SNAPSHOT +proto-google-cloud-bigquerymigration-v2alpha:0.0.0:0.0.1-SNAPSHOT