diff --git a/.github/readme/synth.metadata/synth.metadata b/.github/readme/synth.metadata/synth.metadata new file mode 100644 index 00000000..334f24be --- /dev/null +++ b/.github/readme/synth.metadata/synth.metadata @@ -0,0 +1,18 @@ +{ + "sources": [ + { + "git": { + "name": ".", + "remote": "https://github.com/googleapis/java-bigquerydatatransfer.git", + "sha": "a3bbe899149e85ba1b3669eb1122cf2a0ebcdaac" + } + }, + { + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "6abb59097be84599a1d6091fe534a49e5c5cf948" + } + } + ] +} \ No newline at end of file diff --git a/.github/readme/synth.py b/.github/readme/synth.py new file mode 100644 index 00000000..7b48cc28 --- /dev/null +++ b/.github/readme/synth.py @@ -0,0 +1,19 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This script is used to synthesize the generated README for this library.""" + +from synthtool.languages import java + +java.custom_templates(["java_library/README.md"]) diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml new file mode 100644 index 00000000..e69de29b diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml new file mode 100644 index 00000000..6bddd18e --- /dev/null +++ b/.github/sync-repo-settings.yaml @@ -0,0 +1,49 @@ + +# Whether or not rebase-merging is enabled on this repository. +# Defaults to `true` +rebaseMergeAllowed: false + +# Whether or not squash-merging is enabled on this repository. +# Defaults to `true` +squashMergeAllowed: true + +# Whether or not PRs are merged with a merge commit on this repository. +# Defaults to `false` +mergeCommitAllowed: false + +# Rules for master branch protection +branchProtectionRules: +# Identifies the protection rule pattern. Name of the branch to be protected. +# Defaults to `master` +- pattern: master + # Can admins overwrite branch protection. + # Defaults to `true` + isAdminEnforced: true + # Number of approving reviews required to update matching branches. + # Defaults to `1` + requiredApprovingReviewCount: 1 + # Are reviews from code owners required to update matching branches. + # Defaults to `false` + requiresCodeOwnerReviews: true + # Require up to date branches + requiresStrictStatusChecks: false + # List of required status check contexts that must pass for commits to be accepted to matching branches.
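+ # (Each context below must exactly match the check name a CI job reports on the PR; checks not listed here remain optional.)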
+ requiredStatusCheckContexts: + - "dependencies (8)" + - "dependencies (11)" + - "linkage-monitor" + - "lint" + - "clirr" + - "units (7)" + - "units (8)" + - "units (11)" + - "Kokoro - Test: Integration" + - "cla/google" +# List of explicit permissions to add (additive only) +permissionRules: +- team: yoshi-admins + permission: admin +- team: yoshi-java-admins + permission: admin +- team: yoshi-java + permission: push \ No newline at end of file diff --git a/.github/workflows/approve-readme.yaml b/.github/workflows/approve-readme.yaml new file mode 100644 index 00000000..e2d841d6 --- /dev/null +++ b/.github/workflows/approve-readme.yaml @@ -0,0 +1,54 @@ +on: + pull_request: +name: auto-merge-readme +jobs: + approve: + runs-on: ubuntu-latest + if: github.repository_owner == 'googleapis' && github.head_ref == 'autosynth-readme' + steps: + - uses: actions/github-script@v3.0.0 + with: + github-token: ${{secrets.YOSHI_APPROVER_TOKEN}} + script: | + // only approve PRs titled "chore: regenerate README" + if (context.payload.pull_request.title !== "chore: regenerate README") { + return; + } + + // only approve PRs from yoshi-automation + if (context.payload.pull_request.user.login !== "yoshi-automation") { + return; + } + + // only approve PRs with README.md and synth.metadata changes + const files = new Set( + ( + await github.paginate( + github.pulls.listFiles.endpoint({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.pull_request.number, + }) + ) + ).map(file => file.filename) + ); + if (files.size != 2 || !files.has("README.md") || !files.has(".github/readme/synth.metadata/synth.metadata")) { + return; + } + + // approve README regeneration PR + await github.pulls.createReview({ + owner: context.repo.owner, + repo: context.repo.repo, + body: 'Rubber stamped PR!', + pull_number: context.payload.pull_request.number, + event: 'APPROVE' + }); + + // attach automerge label + await github.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.payload.pull_request.number, + labels: ['automerge'] + }); diff --git a/.github/workflows/auto-release.yaml b/.github/workflows/auto-release.yaml index d26427e4..bc1554ae 100644 --- a/.github/workflows/auto-release.yaml +++ b/.github/workflows/auto-release.yaml @@ -4,10 +4,11 @@ name: auto-release jobs: approve: runs-on: ubuntu-latest + if: contains(github.head_ref, 'release-v') steps: - uses: actions/github-script@v3.0.0 with: - github-token: ${{secrets.GITHUB_TOKEN}} + github-token: ${{secrets.YOSHI_APPROVER_TOKEN}} debug: true script: | // only approve PRs from release-please[bot] @@ -20,6 +21,24 @@ jobs: return; } + // only approve PRs with pom.xml and versions.txt changes + const filesPromise = github.pulls.listFiles.endpoint({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.pull_request.number, + }); + const changed_files = await github.paginate(filesPromise) + + if ( changed_files.length < 1 ) { + console.log( "Not proceeding since PR is empty!" ) + return; + } + + if ( !changed_files.some(v => v.filename.includes("pom")) || !changed_files.some(v => v.filename.includes("versions.txt")) ) { + console.log( "PR file changes do not have pom.xml or versions.txt -- something is wrong. PTAL!"
) + return; + } + // trigger auto-release when // 1) it is a SNAPSHOT release (auto-generated post regular release) // 2) there are dependency updates only @@ -66,4 +85,4 @@ jobs: repo: context.repo.repo, issue_number: context.payload.pull_request.number, labels: ['kokoro:force-run', 'automerge'] - }); \ No newline at end of file + }); diff --git a/.github/workflows/formatting.yaml b/.github/workflows/formatting.yaml new file mode 100644 index 00000000..d4d367cf --- /dev/null +++ b/.github/workflows/formatting.yaml @@ -0,0 +1,25 @@ +on: + pull_request_target: + types: [opened, synchronize] + branches: + - master +name: format +jobs: + format-code: + runs-on: ubuntu-latest + env: + ACCESS_TOKEN: ${{ secrets.YOSHI_CODE_BOT_TOKEN }} + steps: + - uses: actions/checkout@v2 + with: + ref: ${{github.event.pull_request.head.ref}} + repository: ${{github.event.pull_request.head.repo.full_name}} + - uses: actions/setup-java@v1 + with: + java-version: 11 + - run: "mvn com.coveo:fmt-maven-plugin:format" + - uses: googleapis/code-suggester@v1.8.0 + with: + command: review + pull_number: ${{ github.event.pull_request.number }} + git_dir: '.' diff --git a/.github/workflows/samples.yaml b/.github/workflows/samples.yaml index a1d50073..c46230a7 100644 --- a/.github/workflows/samples.yaml +++ b/.github/workflows/samples.yaml @@ -2,7 +2,7 @@ on: pull_request: name: samples jobs: - lint: + checkstyle: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 77c73321..f48f3687 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -69,14 +69,21 @@ integration) RETURN_CODE=$? ;; samples) - if [[ -f samples/pom.xml ]] + SAMPLES_DIR=samples + # only run ITs in snapshot/ on presubmit PRs. run ITs in all 3 samples/ subdirectories otherwise. + if [[ ! -z ${KOKORO_GITHUB_PULL_REQUEST_NUMBER} ]] + then + SAMPLES_DIR=samples/snapshot + fi + + if [[ -f ${SAMPLES_DIR}/pom.xml ]] then if [ -f "${KOKORO_GFILE_DIR}/secret_manager/java-bigquerydatatransfer-samples-secrets" ] then source "${KOKORO_GFILE_DIR}/secret_manager/java-bigquerydatatransfer-samples-secrets" fi - pushd samples + pushd ${SAMPLES_DIR} mvn -B \ -Penable-samples \ -DtrimStackTrace=false \ diff --git a/.kokoro/continuous/readme.cfg b/.kokoro/continuous/readme.cfg new file mode 100644 index 00000000..f282bdc6 --- /dev/null +++ b/.kokoro/continuous/readme.cfg @@ -0,0 +1,55 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/java-bigquerydatatransfer/.kokoro/readme.sh" +} + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + regex: "**/*sponge_log.log" + } +} + +# The github token is stored here. 
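+# Kokoro fetches this key from the keystore below; .kokoro/readme.sh reads it and exports it as GITHUB_TOKEN for autosynth.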
+before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + # TODO(theacodes): remove this after secrets have globally propagated + backend_type: FASTCONFIGPUSH + } + } +} + +# Common env vars for all repositories and builds. +env_vars: { + key: "GITHUB_USER" + value: "yoshi-automation" +} +env_vars: { + key: "GITHUB_EMAIL" + value: "yoshi-automation@google.com" +} diff --git a/.kokoro/readme.sh b/.kokoro/readme.sh new file mode 100755 index 00000000..cc377ded --- /dev/null +++ b/.kokoro/readme.sh @@ -0,0 +1,36 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +cd ${KOKORO_ARTIFACTS_DIR}/github/java-bigquerydatatransfer + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Kokoro exposes this as a file, but the scripts expect just a plain variable. +export GITHUB_TOKEN=$(cat ${KOKORO_KEYSTORE_DIR}/73713_yoshi-automation-github-key) + +# Setup git credentials +echo "https://${GITHUB_TOKEN}:@github.com" >> ~/.git-credentials +git config --global credential.helper 'store --file ~/.git-credentials' + +python3.6 -m pip install git+https://github.com/googleapis/synthtool.git#egg=gcp-synthtool +python3.6 -m autosynth.synth \ + --repository=googleapis/java-bigquerydatatransfer \ + --synth-file-name=.github/readme/synth.py \ + --metadata-path=.github/readme/synth.metadata \ + --pr-title="chore: regenerate README" \ + --branch-suffix="readme" \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 3dbc388b..43955b6f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,37 @@ # Changelog +### [1.0.13](https://www.github.com/googleapis/java-bigquerydatatransfer/compare/v1.0.12...v1.0.13) (2020-10-21) + + +### Documentation + +* **samples:** add copy dataset ([#389](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/389)) ([37097f8](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/37097f8d1e7745007f39533dc9fcd8256ac2ce3f)) +* **samples:** add create amazon s3 transfer ([#416](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/416)) ([52ac0a0](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/52ac0a029c1f47c5cdd9268e320a48ce80f7c212)) +* **samples:** add create cloud storage transfer ([#431](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/431)) ([0938236](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/093823687b6d870e3e443d349fe6a2ad035e6a85)) +* **samples:** add delete transfer 
config ([#388](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/388)) ([61ce27d](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/61ce27d43653ce5f2ae8647370240ec80148399e)) +* **samples:** add disable transfer config ([#384](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/384)) ([533a681](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/533a68158dd92d083aba9e13493e4d66cd3d85df)) +* **samples:** add re-enable transfer config ([#385](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/385)) ([7d61025](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/7d61025046c6172dbe8f1f52bd90324f9a5357c4)) +* **samples:** add schedule backfill ([#390](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/390)) ([96eb331](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/96eb331fdf9d4862d9149bb84d6d2493218db2fc)) +* **samples:** add update credentials ([#425](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/425)) ([16022ea](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/16022eaa8f93b497fb569fe5be5ed996993adf02)) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigquery to v1.122.0 ([#398](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/398)) ([e0edcd7](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/e0edcd7c1a7d22c158ea39ffcb4018dae181c6b0)) +* update dependency com.google.cloud:google-cloud-bigquery to v1.122.1 ([#411](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/411)) ([41fb285](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/41fb285d233d9598e7b54e49b4a67b0c49bb4af3)) +* update dependency com.google.cloud:google-cloud-bigquery to v1.122.2 ([#418](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/418)) ([ad92b1a](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/ad92b1af71bbf747da57aae011f47bc5d7f07c27)) +* update dependency com.google.cloud:google-cloud-bigquery to v1.123.0 ([#444](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/444)) ([85d78bc](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/85d78bc1770969e1e5feda76d3134ae3313a2652)) +* update dependency com.google.cloud:google-cloud-pubsub to v1.108.3 ([#407](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/407)) 
([78e055e](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/78e055e61ead073bda37289043c350b93c8ace50)) +* update dependency com.google.cloud:google-cloud-pubsub to v1.108.4 ([#423](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/423)) ([1807342](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/18073427ef4c0103949f6f7285c8869e08ff8205)) +* update dependency com.google.cloud:google-cloud-pubsub to v1.108.5 ([#440](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/440)) ([5c33507](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/5c33507b9f9628eee84b1afcbebd54cc31b101a6)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.10.1 ([#412](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/412)) ([0344c1e](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/0344c1e844de6c82a48a8de0b4afcc188cbda1c5)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.10.2 ([#415](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/415)) ([4eb4530](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/4eb4530c54704eaf5418331746b84e5a895bf930)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.11.0 ([#428](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/428)) ([1247468](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/1247468d50dfc8a57b92a84974e8593293e88aeb)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.12.1 ([#439](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/439)) ([d9c7a5c](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/d9c7a5c6b57b3a3564457d6d68d645bc20ef4b5b)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.13.0 ([#445](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/445)) ([e5027fc](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/e5027fceb9665e1a8849c95d4d67a8ee3d567cff)) +* update dependency com.google.truth:truth to v1.1 ([#443](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/443)) ([f966881](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/f96688152dcb2cac2e0dae6f4aa8527c86df9896)) +* update dependency junit:junit to v4.13.1 ([#421](https://www.github.com/googleapis/java-bigquerydatatransfer/issues/421)) ([310122b](https://www.github.com/googleapis/java-bigquerydatatransfer/commit/310122bb335e8f716d12890c8e06397e1b997ac3)) + ### 
[1.0.12](https://www.github.com/googleapis/java-bigquerydatatransfer/compare/v1.0.11...v1.0.12) (2020-09-23) diff --git a/README.md b/README.md index 7a90cdf4..ba02c301 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ If you are using Maven with [BOM][libraries-bom], add this to your pom.xml file com.google.cloud libraries-bom - 10.1.0 + 13.0.0 pom import @@ -38,13 +38,11 @@ If you are using Maven without BOM, add this to your dependencies: com.google.cloud google-cloud-bigquerydatatransfer - 1.0.11 + 1.0.12 ``` -[//]: # ({x-version-update-start:google-cloud-bigquerydatatransfer:released}) - If you are using Gradle, add this to your dependencies ```Groovy compile 'com.google.cloud:google-cloud-bigquerydatatransfer:1.0.12' @@ -53,7 +51,6 @@ If you are using SBT, add this to your dependencies ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-bigquerydatatransfer" % "1.0.12" ``` -[//]: # ({x-version-update-end}) ## Authentication @@ -93,15 +90,23 @@ has instructions for running the samples. | Sample | Source Code | Try it | | --------------------------- | --------------------------------- | ------ | +| Copy Dataset | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CopyDataset.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/CopyDataset.java) | +| Create Amazon S3 Transfer | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateAmazonS3Transfer.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateAmazonS3Transfer.java) | +| Create Cloud Storage Transfer | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateCloudStorageTransfer.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateCloudStorageTransfer.java) | | Create Scheduled Query | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateScheduledQuery.java) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateScheduledQuery.java) | | Create Scheduled Query With Service Account | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateScheduledQueryWithServiceAccount.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateScheduledQueryWithServiceAccount.java) | | Delete Scheduled Query | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/DeleteScheduledQuery.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/DeleteScheduledQuery.java) | +| Delete Transfer Config | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/DeleteTransferConfig.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/DeleteTransferConfig.java) | +| Disable Transfer Config | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/DisableTransferConfig.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/DisableTransferConfig.java) | | Get Transfer Config Info | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/GetTransferConfigInfo.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/GetTransferConfigInfo.java) | | List Transfer Configs | [source 
code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/ListTransferConfigs.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/ListTransferConfigs.java) | | Quickstart Sample | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/QuickstartSample.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/QuickstartSample.java) | +| Re Enable Transfer Config | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/ReEnableTransferConfig.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/ReEnableTransferConfig.java) | | Run Details | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/RunDetails.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/RunDetails.java) | | Run History | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/RunHistory.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/RunHistory.java) | | Run Notification | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/RunNotification.java) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/RunNotification.java) | +| Schedule Back Fill | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/ScheduleBackFill.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/ScheduleBackFill.java) | +| Update Credentials | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/UpdateCredentials.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/UpdateCredentials.java) | | Update Transfer Config | [source code](https://github.com/googleapis/java-bigquerydatatransfer/blob/master/samples/snippets/src/main/java/com/example/bigquerydatatransfer/UpdateTransferConfig.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerydatatransfer&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerydatatransfer/UpdateTransferConfig.java) | diff --git a/google-cloud-bigquerydatatransfer-bom/pom.xml b/google-cloud-bigquerydatatransfer-bom/pom.xml index 84643a04..0da7c400 100644 --- a/google-cloud-bigquerydatatransfer-bom/pom.xml +++ b/google-cloud-bigquerydatatransfer-bom/pom.xml @@ -3,12 +3,12 @@ 4.0.0 com.google.cloud google-cloud-bigquerydatatransfer-bom - 1.0.12 + 1.0.13 pom com.google.cloud google-cloud-shared-config - 0.9.2 + 0.9.3 Google Cloud bigquerydatatransfer BOM @@ -64,17 +64,17 @@ com.google.cloud google-cloud-bigquerydatatransfer - 1.0.12 + 1.0.13 com.google.api.grpc proto-google-cloud-bigquerydatatransfer-v1 - 1.0.12 + 1.0.13 com.google.api.grpc grpc-google-cloud-bigquerydatatransfer-v1 - 1.0.12 + 1.0.13 diff --git a/google-cloud-bigquerydatatransfer/pom.xml b/google-cloud-bigquerydatatransfer/pom.xml index 8d9c45ea..de85d92a 100644 --- a/google-cloud-bigquerydatatransfer/pom.xml +++ b/google-cloud-bigquerydatatransfer/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquerydatatransfer - 1.0.12 + 1.0.13 jar BigQuery DataTransfer https://github.com/googleapis/java-bigquerydatatransfer @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquerydatatransfer-parent - 1.0.12 + 1.0.13 google-cloud-bigquerydatatransfer diff --git a/grpc-google-cloud-bigquerydatatransfer-v1/pom.xml 
b/grpc-google-cloud-bigquerydatatransfer-v1/pom.xml index d04c6a82..7f2e4531 100644 --- a/grpc-google-cloud-bigquerydatatransfer-v1/pom.xml +++ b/grpc-google-cloud-bigquerydatatransfer-v1/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc grpc-google-cloud-bigquerydatatransfer-v1 - 1.0.12 + 1.0.13 grpc-google-cloud-bigquerydatatransfer-v1 GRPC library for grpc-google-cloud-bigquerydatatransfer-v1 com.google.cloud google-cloud-bigquerydatatransfer-parent - 1.0.12 + 1.0.13 diff --git a/pom.xml b/pom.xml index e3cc9eb7..dca50960 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquerydatatransfer-parent pom - 1.0.12 + 1.0.13 BigQuery DataTransfer Parent https://github.com/googleapis/java-bigquerydatatransfer @@ -14,7 +14,7 @@ com.google.cloud google-cloud-shared-config - 0.9.2 + 0.9.3 @@ -70,31 +70,31 @@ com.google.cloud google-cloud-shared-dependencies - 0.10.0 + 0.13.0 pom import com.google.api.grpc proto-google-cloud-bigquerydatatransfer-v1 - 1.0.12 + 1.0.13 com.google.api.grpc grpc-google-cloud-bigquerydatatransfer-v1 - 1.0.12 + 1.0.13 com.google.cloud google-cloud-bigquerydatatransfer - 1.0.12 + 1.0.13 junit junit - 4.13 + 4.13.1 test diff --git a/proto-google-cloud-bigquerydatatransfer-v1/pom.xml b/proto-google-cloud-bigquerydatatransfer-v1/pom.xml index 1b16a42e..01e80e9b 100644 --- a/proto-google-cloud-bigquerydatatransfer-v1/pom.xml +++ b/proto-google-cloud-bigquerydatatransfer-v1/pom.xml @@ -4,13 +4,13 @@ 4.0.0 com.google.api.grpc proto-google-cloud-bigquerydatatransfer-v1 - 1.0.12 + 1.0.13 proto-google-cloud-bigquerydatatransfer-v1 PROTO library for proto-google-cloud-bigquerydatatransfer-v1 com.google.cloud google-cloud-bigquerydatatransfer-parent - 1.0.12 + 1.0.13 diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml index ae687104..2648e51d 100644 --- a/samples/install-without-bom/pom.xml +++ b/samples/install-without-bom/pom.xml @@ -14,7 +14,7 @@ com.google.cloud.samples shared-configuration - 1.0.18 + 1.0.21 @@ -23,13 +23,12 @@ UTF-8 - com.google.cloud google-cloud-bigquerydatatransfer - 1.0.11 + 1.0.12 @@ -42,25 +41,25 @@ junit junit - 4.13 + 4.13.1 test com.google.truth truth - 1.0.1 + 1.1 test com.google.cloud google-cloud-bigquery - 1.120.0 + 1.123.0 test com.google.cloud google-cloud-pubsub - 1.108.1 + 1.108.5 test diff --git a/samples/pom.xml b/samples/pom.xml index 41ce1664..39cb2d90 100644 --- a/samples/pom.xml +++ b/samples/pom.xml @@ -18,7 +18,7 @@ com.google.cloud.samples shared-configuration - 1.0.18 + 1.0.21 diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 43bc6358..2c85420f 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -14,7 +14,7 @@ com.google.cloud.samples shared-configuration - 1.0.18 + 1.0.21 @@ -28,7 +28,7 @@ com.google.cloud google-cloud-bigquerydatatransfer - 1.0.12 + 1.0.13 @@ -41,25 +41,25 @@ junit junit - 4.13 + 4.13.1 test com.google.truth truth - 1.0.1 + 1.1 test com.google.cloud google-cloud-bigquery - 1.120.0 + 1.123.0 test com.google.cloud google-cloud-pubsub - 1.108.1 + 1.108.5 test diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml index 3e418151..9822436d 100644 --- a/samples/snippets/pom.xml +++ b/samples/snippets/pom.xml @@ -14,7 +14,7 @@ com.google.cloud.samples shared-configuration - 1.0.18 + 1.0.21 @@ -30,7 +30,7 @@ com.google.cloud libraries-bom - 10.1.0 + 13.0.0 pom import @@ -53,25 +53,25 @@ junit junit - 4.13 + 4.13.1 test com.google.truth truth - 1.0.1 + 1.1 
test com.google.cloud google-cloud-bigquery - 1.120.0 + 1.123.0 test com.google.cloud google-cloud-pubsub - 1.108.1 + 1.108.5 test diff --git a/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CopyDataset.java b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CopyDataset.java new file mode 100644 index 00000000..631befa3 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CopyDataset.java @@ -0,0 +1,70 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquerydatatransfer; + +// [START bigquerydatatransfer_copy_dataset] +import com.google.api.gax.rpc.ApiException; +import com.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest; +import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient; +import com.google.cloud.bigquery.datatransfer.v1.ProjectName; +import com.google.cloud.bigquery.datatransfer.v1.TransferConfig; +import com.google.protobuf.Struct; +import com.google.protobuf.Value; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +// Sample to copy a dataset from another GCP project +public class CopyDataset { + + public static void main(String[] args) throws IOException { + // TODO(developer): Replace these variables before running the sample. + final String destinationProjectId = "MY_DESTINATION_PROJECT_ID"; + final String destinationDatasetId = "MY_DESTINATION_DATASET_ID"; + final String sourceProjectId = "MY_SOURCE_PROJECT_ID"; + final String sourceDatasetId = "MY_SOURCE_DATASET_ID"; + Map<String, Value> params = new HashMap<>(); + params.put("source_project_id", Value.newBuilder().setStringValue(sourceProjectId).build()); + params.put("source_dataset_id", Value.newBuilder().setStringValue(sourceDatasetId).build()); + TransferConfig transferConfig = + TransferConfig.newBuilder() + .setDestinationDatasetId(destinationDatasetId) + .setDisplayName("Your Dataset Copy Name") + .setDataSourceId("cross_region_copy") + .setParams(Struct.newBuilder().putAllFields(params).build()) + .setSchedule("every 24 hours") + .build(); + copyDataset(destinationProjectId, transferConfig); + } + + public static void copyDataset(String projectId, TransferConfig transferConfig) + throws IOException { + try (DataTransferServiceClient dataTransferServiceClient = DataTransferServiceClient.create()) { + ProjectName parent = ProjectName.of(projectId); + CreateTransferConfigRequest request = + CreateTransferConfigRequest.newBuilder() + .setParent(parent.toString()) + .setTransferConfig(transferConfig) + .build(); + TransferConfig config = dataTransferServiceClient.createTransferConfig(request); + System.out.println("Copy dataset created successfully: " + config.getName()); + } catch (ApiException ex) { + System.out.print("Copy dataset was not created. "
+ ex.toString()); + } + } +} +// [END bigquerydatatransfer_copy_dataset] diff --git a/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateAmazonS3Transfer.java b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateAmazonS3Transfer.java new file mode 100644 index 00000000..00b2e3fe --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateAmazonS3Transfer.java @@ -0,0 +1,82 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquerydatatransfer; + +// [START bigquerydatatransfer_create_amazons3_transfer] +import com.google.api.gax.rpc.ApiException; +import com.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest; +import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient; +import com.google.cloud.bigquery.datatransfer.v1.ProjectName; +import com.google.cloud.bigquery.datatransfer.v1.TransferConfig; +import com.google.protobuf.Struct; +import com.google.protobuf.Value; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +// Sample to create amazon s3 transfer config. +public class CreateAmazonS3Transfer { + + public static void main(String[] args) throws IOException { + // TODO(developer): Replace these variables before running the sample. 
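+ // All values below are illustrative placeholders; substitute your own + // project, dataset, table, and AWS credentials before running. Note that + // the "amazon_s3" data source takes every parameter as a string Value, + // which is how the params map below is built.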
+ final String projectId = "MY_PROJECT_ID"; + String datasetId = "MY_DATASET_ID"; + String tableId = "MY_TABLE_ID"; + // Amazon S3 Bucket Uri with read role permission + String sourceUri = "s3://your-bucket-name/*"; + String awsAccessKeyId = "MY_AWS_ACCESS_KEY_ID"; + String awsSecretAccessId = "AWS_SECRET_ACCESS_ID"; + String sourceFormat = "CSV"; + String fieldDelimiter = ","; + String skipLeadingRows = "1"; + Map<String, Value> params = new HashMap<>(); + params.put( + "destination_table_name_template", Value.newBuilder().setStringValue(tableId).build()); + params.put("data_path", Value.newBuilder().setStringValue(sourceUri).build()); + params.put("access_key_id", Value.newBuilder().setStringValue(awsAccessKeyId).build()); + params.put("secret_access_key", Value.newBuilder().setStringValue(awsSecretAccessId).build()); + params.put("source_format", Value.newBuilder().setStringValue(sourceFormat).build()); + params.put("field_delimiter", Value.newBuilder().setStringValue(fieldDelimiter).build()); + params.put("skip_leading_rows", Value.newBuilder().setStringValue(skipLeadingRows).build()); + TransferConfig transferConfig = + TransferConfig.newBuilder() + .setDestinationDatasetId(datasetId) + .setDisplayName("Your Aws S3 Config Name") + .setDataSourceId("amazon_s3") + .setParams(Struct.newBuilder().putAllFields(params).build()) + .setSchedule("every 24 hours") + .build(); + createAmazonS3Transfer(projectId, transferConfig); + } + + public static void createAmazonS3Transfer(String projectId, TransferConfig transferConfig) + throws IOException { + try (DataTransferServiceClient client = DataTransferServiceClient.create()) { + ProjectName parent = ProjectName.of(projectId); + CreateTransferConfigRequest request = + CreateTransferConfigRequest.newBuilder() + .setParent(parent.toString()) + .setTransferConfig(transferConfig) + .build(); + TransferConfig config = client.createTransferConfig(request); + System.out.println("Amazon S3 transfer created successfully: " + config.getName()); + } catch (ApiException ex) { + System.out.print("Amazon S3 transfer was not created. " + ex.toString()); + } + } +} +// [END bigquerydatatransfer_create_amazons3_transfer] diff --git a/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateCloudStorageTransfer.java b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateCloudStorageTransfer.java new file mode 100644 index 00000000..c4f1e41c --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/CreateCloudStorageTransfer.java @@ -0,0 +1,79 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package com.example.bigquerydatatransfer; + +// [START bigquerydatatransfer_create_cloudstorage_transfer] +import com.google.api.gax.rpc.ApiException; +import com.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest; +import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient; +import com.google.cloud.bigquery.datatransfer.v1.ProjectName; +import com.google.cloud.bigquery.datatransfer.v1.TransferConfig; +import com.google.protobuf.Struct; +import com.google.protobuf.Value; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +// Sample to create a Google Cloud Storage transfer config +public class CreateCloudStorageTransfer { + + public static void main(String[] args) throws IOException { + // TODO(developer): Replace these variables before running the sample. + final String projectId = "MY_PROJECT_ID"; + String datasetId = "MY_DATASET_ID"; + String tableId = "MY_TABLE_ID"; + // GCS Uri + String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv"; + String fileFormat = "CSV"; + String fieldDelimiter = ","; + String skipLeadingRows = "1"; + Map<String, Value> params = new HashMap<>(); + params.put( + "destination_table_name_template", Value.newBuilder().setStringValue(tableId).build()); + params.put("data_path_template", Value.newBuilder().setStringValue(sourceUri).build()); + params.put("write_disposition", Value.newBuilder().setStringValue("APPEND").build()); + params.put("file_format", Value.newBuilder().setStringValue(fileFormat).build()); + params.put("field_delimiter", Value.newBuilder().setStringValue(fieldDelimiter).build()); + params.put("skip_leading_rows", Value.newBuilder().setStringValue(skipLeadingRows).build()); + TransferConfig transferConfig = + TransferConfig.newBuilder() + .setDestinationDatasetId(datasetId) + .setDisplayName("Your Google Cloud Storage Config Name") + .setDataSourceId("google_cloud_storage") + .setParams(Struct.newBuilder().putAllFields(params).build()) + .setSchedule("every 24 hours") + .build(); + createCloudStorageTransfer(projectId, transferConfig); + } + + public static void createCloudStorageTransfer(String projectId, TransferConfig transferConfig) + throws IOException { + try (DataTransferServiceClient client = DataTransferServiceClient.create()) { + ProjectName parent = ProjectName.of(projectId); + CreateTransferConfigRequest request = + CreateTransferConfigRequest.newBuilder() + .setParent(parent.toString()) + .setTransferConfig(transferConfig) + .build(); + TransferConfig config = client.createTransferConfig(request); + System.out.println("Cloud storage transfer created successfully: " + config.getName()); + } catch (ApiException ex) { + System.out.print("Cloud storage transfer was not created. " + ex.toString()); + } + } +} +// [END bigquerydatatransfer_create_cloudstorage_transfer] diff --git a/samples/snippets/src/main/java/com/example/bigquerydatatransfer/DeleteTransferConfig.java b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/DeleteTransferConfig.java new file mode 100644 index 00000000..b878b253 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/DeleteTransferConfig.java @@ -0,0 +1,47 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquerydatatransfer; + +// [START bigquerydatatransfer_delete_transfer] +import com.google.api.gax.rpc.ApiException; +import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient; +import com.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest; +import java.io.IOException; + +// Sample to delete a transfer config +public class DeleteTransferConfig { + + public static void main(String[] args) throws IOException { + // TODO(developer): Replace these variables before running the sample. + // i.e projects/{project_id}/transferConfigs/{config_id}` or + // `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` + String configId = "MY_CONFIG_ID"; + deleteTransferConfig(configId); + } + + public static void deleteTransferConfig(String configId) throws IOException { + try (DataTransferServiceClient dataTransferServiceClient = DataTransferServiceClient.create()) { + DeleteTransferConfigRequest request = + DeleteTransferConfigRequest.newBuilder().setName(configId).build(); + dataTransferServiceClient.deleteTransferConfig(request); + System.out.println("Transfer config deleted successfully"); + } catch (ApiException ex) { + System.out.println("Transfer config was not deleted." + ex.toString()); + } + } +} +// [END bigquerydatatransfer_delete_transfer] diff --git a/samples/snippets/src/main/java/com/example/bigquerydatatransfer/DisableTransferConfig.java b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/DisableTransferConfig.java new file mode 100644 index 00000000..5b88cee0 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/DisableTransferConfig.java @@ -0,0 +1,55 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquerydatatransfer; + +// [START bigquerydatatransfer_disable_transfer] +import com.google.api.gax.rpc.ApiException; +import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient; +import com.google.cloud.bigquery.datatransfer.v1.TransferConfig; +import com.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest; +import com.google.protobuf.FieldMask; +import com.google.protobuf.util.FieldMaskUtil; +import java.io.IOException; + +// Sample to disable transfer config. +public class DisableTransferConfig { + + public static void main(String[] args) throws IOException { + // TODO(developer): Replace these variables before running the sample. 
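+ // A config is disabled by setting its "disabled" field to true and + // sending an update whose FieldMask names only that field, so no other + // settings on the transfer config are touched (see the mask built below).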
+ String configId = "MY_CONFIG_ID"; + TransferConfig transferConfig = + TransferConfig.newBuilder().setName(configId).setDisabled(true).build(); + FieldMask updateMask = FieldMaskUtil.fromString("disabled"); + disableTransferConfig(transferConfig, updateMask); + } + + public static void disableTransferConfig(TransferConfig transferConfig, FieldMask updateMask) + throws IOException { + try (DataTransferServiceClient dataTransferServiceClient = DataTransferServiceClient.create()) { + UpdateTransferConfigRequest request = + UpdateTransferConfigRequest.newBuilder() + .setTransferConfig(transferConfig) + .setUpdateMask(updateMask) + .build(); + TransferConfig updateConfig = dataTransferServiceClient.updateTransferConfig(request); + System.out.println("Transfer config disabled successfully :" + updateConfig.getDisplayName()); + } catch (ApiException ex) { + System.out.print("Transfer config was not disabled." + ex.toString()); + } + } +} +// [END bigquerydatatransfer_disable_transfer] diff --git a/samples/snippets/src/main/java/com/example/bigquerydatatransfer/ReEnableTransferConfig.java b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/ReEnableTransferConfig.java new file mode 100644 index 00000000..64cca379 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/ReEnableTransferConfig.java @@ -0,0 +1,55 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquerydatatransfer; + +// [START bigquerydatatransfer_reenable_transfer] +import com.google.api.gax.rpc.ApiException; +import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient; +import com.google.cloud.bigquery.datatransfer.v1.TransferConfig; +import com.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest; +import com.google.protobuf.FieldMask; +import com.google.protobuf.util.FieldMaskUtil; +import java.io.IOException; + +// Sample to re-enable transfer config. +public class ReEnableTransferConfig { + + public static void main(String[] args) throws IOException { + // TODO(developer): Replace these variables before running the sample. 
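+ // Re-enabling mirrors DisableTransferConfig: the same "disabled" field + // mask is used, but the field is set back to false below.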
+ String configId = "MY_CONFIG_ID"; + TransferConfig transferConfig = + TransferConfig.newBuilder().setName(configId).setDisabled(false).build(); + FieldMask updateMask = FieldMaskUtil.fromString("disabled"); + reEnableTransferConfig(transferConfig, updateMask); + } + + public static void reEnableTransferConfig(TransferConfig transferConfig, FieldMask updateMask) + throws IOException { + try (DataTransferServiceClient dataTransferServiceClient = DataTransferServiceClient.create()) { + UpdateTransferConfigRequest request = + UpdateTransferConfigRequest.newBuilder() + .setTransferConfig(transferConfig) + .setUpdateMask(updateMask) + .build(); + TransferConfig updateConfig = dataTransferServiceClient.updateTransferConfig(request); + System.out.println("Transfer config re-enabled successfully: " + updateConfig.getDisplayName()); + } catch (ApiException ex) { + System.out.print("Transfer config was not re-enabled. " + ex.toString()); + } + } +} +// [END bigquerydatatransfer_reenable_transfer] diff --git a/samples/snippets/src/main/java/com/example/bigquerydatatransfer/ScheduleBackFill.java b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/ScheduleBackFill.java new file mode 100644 index 00000000..5a79d492 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/ScheduleBackFill.java @@ -0,0 +1,78 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquerydatatransfer; + +// [START bigquerydatatransfer_schedule_backfill] +import com.google.api.gax.rpc.ApiException; +import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient; +import com.google.cloud.bigquery.datatransfer.v1.ScheduleOptions; +import com.google.cloud.bigquery.datatransfer.v1.TransferConfig; +import com.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest; +import com.google.common.collect.ImmutableList; +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import com.google.protobuf.util.FieldMaskUtil; +import java.io.IOException; +import org.threeten.bp.Clock; +import org.threeten.bp.Instant; +import org.threeten.bp.temporal.ChronoUnit; + +// Sample to schedule a backfill for a transfer config +public class ScheduleBackFill { + + public static void main(String[] args) throws IOException { + // TODO(developer): Replace these variables before running the sample.
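+ // The backfill window sketched below runs from now until 10 days from + // now; the update mask lists start_time and end_time so only the + // schedule options change. Adjust the window to cover the range you + // want to re-run.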
+ String configId = "MY_CONFIG_ID"; + Clock clock = Clock.systemDefaultZone(); + Instant instant = clock.instant(); + Timestamp startDate = + Timestamp.newBuilder() + .setSeconds(instant.getEpochSecond()) + .setNanos(instant.getNano()) + .build(); + Timestamp endDate = + Timestamp.newBuilder() + .setSeconds(instant.plus(10, ChronoUnit.DAYS).getEpochSecond()) + .setNanos(instant.plus(10, ChronoUnit.DAYS).getNano()) + .build(); + TransferConfig transferConfig = + TransferConfig.newBuilder() + .setName(configId) + .setScheduleOptions( + ScheduleOptions.newBuilder().setStartTime(startDate).setEndTime(endDate).build()) + .build(); + FieldMask updateMask = FieldMaskUtil.fromStringList(ImmutableList.of("start_time", "end_time")); + scheduleBackFill(transferConfig, updateMask); + } + + public static void scheduleBackFill(TransferConfig transferConfig, FieldMask updateMask) + throws IOException { + try (DataTransferServiceClient dataTransferServiceClient = DataTransferServiceClient.create()) { + UpdateTransferConfigRequest request = + UpdateTransferConfigRequest.newBuilder() + .setTransferConfig(transferConfig) + .setUpdateMask(updateMask) + .build(); + TransferConfig updateConfig = dataTransferServiceClient.updateTransferConfig(request); + System.out.println( + "Schedule backfill updated successfully :" + updateConfig.getDisplayName()); + } catch (ApiException ex) { + System.out.print("Schedule backfill was not updated." + ex.toString()); + } + } +} +// [END bigquerydatatransfer_schedule_backfill] diff --git a/samples/snippets/src/main/java/com/example/bigquerydatatransfer/UpdateCredentials.java b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/UpdateCredentials.java new file mode 100644 index 00000000..ccdbde3b --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/UpdateCredentials.java @@ -0,0 +1,57 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquerydatatransfer; + +// [START bigquerydatatransfer_update_credentials] +import com.google.api.gax.rpc.ApiException; +import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient; +import com.google.cloud.bigquery.datatransfer.v1.TransferConfig; +import com.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest; +import com.google.protobuf.FieldMask; +import com.google.protobuf.util.FieldMaskUtil; +import java.io.IOException; + +// Sample to update credentials in transfer config. +public class UpdateCredentials { + + public static void main(String[] args) throws IOException { + // TODO(developer): Replace these variables before running the sample. 
+ String configId = "MY_CONFIG_ID"; + String serviceAccount = "MY_SERVICE_ACCOUNT"; + TransferConfig transferConfig = TransferConfig.newBuilder().setName(configId).build(); + FieldMask updateMask = FieldMaskUtil.fromString("service_account_name"); + updateCredentials(transferConfig, serviceAccount, updateMask); + } + + public static void updateCredentials( + TransferConfig transferConfig, String serviceAccount, FieldMask updateMask) + throws IOException { + try (DataTransferServiceClient dataTransferServiceClient = DataTransferServiceClient.create()) { + UpdateTransferConfigRequest request = + UpdateTransferConfigRequest.newBuilder() + .setTransferConfig(transferConfig) + .setUpdateMask(updateMask) + .setServiceAccountName(serviceAccount) + .build(); + dataTransferServiceClient.updateTransferConfig(request); + System.out.println("Credentials updated successfully"); + } catch (ApiException ex) { + System.out.print("Credentials was not updated." + ex.toString()); + } + } +} +// [END bigquerydatatransfer_update_credentials] diff --git a/samples/snippets/src/main/java/com/example/bigquerydatatransfer/UpdateTransferConfig.java b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/UpdateTransferConfig.java index 7d45ed69..09e1f057 100644 --- a/samples/snippets/src/main/java/com/example/bigquerydatatransfer/UpdateTransferConfig.java +++ b/samples/snippets/src/main/java/com/example/bigquerydatatransfer/UpdateTransferConfig.java @@ -31,17 +31,18 @@ public class UpdateTransferConfig { public static void main(String[] args) throws IOException { // TODO(developer): Replace these variables before running the sample. String configId = "MY_CONFIG_ID"; - updateTransferConfig(configId); + TransferConfig transferConfig = + TransferConfig.newBuilder() + .setName(configId) + .setDisplayName("UPDATED_DISPLAY_NAME") + .build(); + FieldMask updateMask = FieldMaskUtil.fromString("display_name"); + updateTransferConfig(transferConfig, updateMask); } - public static void updateTransferConfig(String configId) throws IOException { + public static void updateTransferConfig(TransferConfig transferConfig, FieldMask updateMask) + throws IOException { try (DataTransferServiceClient dataTransferServiceClient = DataTransferServiceClient.create()) { - TransferConfig transferConfig = - TransferConfig.newBuilder() - .setName(configId) - .setDisplayName("UPDATED_DISPLAY_NAME") - .build(); - FieldMask updateMask = FieldMaskUtil.fromString("display_name"); UpdateTransferConfigRequest request = UpdateTransferConfigRequest.newBuilder() .setTransferConfig(transferConfig) diff --git a/samples/snippets/src/test/java/com/example/bigquerydatatransfer/CopyDatasetIT.java b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/CopyDatasetIT.java new file mode 100644 index 00000000..84f05fa3 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/CopyDatasetIT.java @@ -0,0 +1,113 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquerydatatransfer;
+
+import static com.google.common.truth.Truth.assertThat;
+import static junit.framework.TestCase.assertNotNull;
+
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.DatasetInfo;
+import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
+import com.google.protobuf.Struct;
+import com.google.protobuf.Value;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class CopyDatasetIT {
+
+  private static final Logger LOG = Logger.getLogger(CopyDatasetIT.class.getName());
+  private BigQuery bigquery;
+  private ByteArrayOutputStream bout;
+  private String name;
+  private String displayName;
+  private String datasetName;
+  private PrintStream out;
+  private PrintStream originalPrintStream;
+
+  private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT");
+
+  private static String requireEnvVar(String varName) {
+    String value = System.getenv(varName);
+    assertNotNull(
+        "Environment variable " + varName + " is required to perform these tests.",
+        System.getenv(varName));
+    return value;
+  }
+
+  @BeforeClass
+  public static void checkRequirements() {
+    requireEnvVar("GOOGLE_CLOUD_PROJECT");
+  }
+
+  @Before
+  public void setUp() {
+    displayName = "MY_COPY_DATASET_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8);
+    datasetName = "MY_DATASET_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8);
+    // create a temporary dataset
+    bigquery = BigQueryOptions.getDefaultInstance().getService();
+    bigquery.create(DatasetInfo.of(datasetName));
+
+    bout = new ByteArrayOutputStream();
+    out = new PrintStream(bout);
+    originalPrintStream = System.out;
+    System.setOut(out);
+  }
+
+  @After
+  public void tearDown() throws IOException {
+    // TODO(pmakani): replace DeleteScheduledQuery with DeleteTransferConfig once that PR is merged.
+    // Clean up
+    DeleteScheduledQuery.deleteScheduledQuery(name);
+    // delete a temporary dataset
+    bigquery.delete(datasetName, BigQuery.DatasetDeleteOption.deleteContents());
+    // restores print statements in the original method
+    System.out.flush();
+    System.setOut(originalPrintStream);
+    LOG.log(Level.INFO, bout.toString());
+  }
+
+  @Test
+  public void testCopyDataset() throws IOException {
+    Map<String, Value> params = new HashMap<>();
+    params.put(
+        "source_project_id", Value.newBuilder().setStringValue("bigquery-public-data").build());
+    params.put("source_dataset_id", Value.newBuilder().setStringValue("usa_names").build());
+    TransferConfig transferConfig =
+        TransferConfig.newBuilder()
+            .setDestinationDatasetId(datasetName)
+            .setDisplayName(displayName)
+            .setDataSourceId("cross_region_copy")
+            .setParams(Struct.newBuilder().putAllFields(params).build())
+            .setSchedule("every 24 hours")
+            .build();
+    CopyDataset.copyDataset(PROJECT_ID, transferConfig);
+    String result = bout.toString();
+    name = result.substring(result.indexOf(":") + 1, result.length() - 1);
+    assertThat(result).contains("Copy dataset created successfully :");
+  }
+}
diff --git a/samples/snippets/src/test/java/com/example/bigquerydatatransfer/CreateAmazonS3TransferIT.java b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/CreateAmazonS3TransferIT.java
new file mode 100644
index 00000000..7d037daf
--- /dev/null
+++ b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/CreateAmazonS3TransferIT.java
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquerydatatransfer;
+
+import static com.google.common.truth.Truth.assertThat;
+import static junit.framework.TestCase.assertNotNull;
+
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.DatasetInfo;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.cloud.bigquery.StandardTableDefinition;
+import com.google.cloud.bigquery.TableDefinition;
+import com.google.cloud.bigquery.TableId;
+import com.google.cloud.bigquery.TableInfo;
+import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
+import com.google.protobuf.Struct;
+import com.google.protobuf.Value;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class CreateAmazonS3TransferIT {
+
+  private static final Logger LOG = Logger.getLogger(CreateAmazonS3TransferIT.class.getName());
+  private static final String ID = UUID.randomUUID().toString().substring(0, 8);
+  private BigQuery bigquery;
+  private ByteArrayOutputStream bout;
+  private String name;
+  private String displayName;
+  private String datasetName;
+  private String tableName;
+  private PrintStream out;
+  private PrintStream originalPrintStream;
+
+  private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT");
+  private static final String AWS_ACCESS_KEY_ID = requireEnvVar("AWS_ACCESS_KEY_ID");
+  private static final String AWS_SECRET_ACCESS_KEY = requireEnvVar("AWS_SECRET_ACCESS_KEY");
+  private static final String AWS_BUCKET = requireEnvVar("AWS_BUCKET");
+
+  private static String requireEnvVar(String varName) {
+    String value = System.getenv(varName);
+    assertNotNull(
+        "Environment variable " + varName + " is required to perform these tests.",
+        System.getenv(varName));
+    return value;
+  }
+
+  @BeforeClass
+  public static void checkRequirements() {
+    requireEnvVar("GOOGLE_CLOUD_PROJECT");
+    requireEnvVar("AWS_ACCESS_KEY_ID");
+    requireEnvVar("AWS_SECRET_ACCESS_KEY");
+    requireEnvVar("AWS_BUCKET");
+  }
+
+  @Before
+  public void setUp() {
+    displayName = "MY_SCHEDULE_NAME_TEST_" + ID;
+    datasetName = "MY_DATASET_NAME_TEST_" + ID;
+    tableName = "MY_TABLE_NAME_TEST_" + ID;
+    // create a temporary dataset
+    bigquery = BigQueryOptions.getDefaultInstance().getService();
+    bigquery.create(DatasetInfo.of(datasetName));
+    // create a temporary table
+    Schema schema =
+        Schema.of(
+            Field.of("name", StandardSQLTypeName.STRING),
+            Field.of("post_abbr", StandardSQLTypeName.STRING));
+    TableDefinition tableDefinition = StandardTableDefinition.of(schema);
+    TableInfo tableInfo = TableInfo.of(TableId.of(datasetName, tableName), tableDefinition);
+    bigquery.create(tableInfo);
+
+    bout = new ByteArrayOutputStream();
+    out = new PrintStream(bout);
+    originalPrintStream = System.out;
+    System.setOut(out);
+  }
+
+  @After
+  public void tearDown() throws IOException {
+    // Clean up
+    DeleteScheduledQuery.deleteScheduledQuery(name);
+    // delete a temporary table
+    bigquery.delete(TableId.of(datasetName, tableName));
+    // delete a temporary dataset
+    bigquery.delete(datasetName, BigQuery.DatasetDeleteOption.deleteContents());
+    // restores print statements in the original method
+    System.out.flush();
+    System.setOut(originalPrintStream);
+    LOG.log(Level.INFO, bout.toString());
+  }
+
+  @Test
+  public void testCreateAmazonS3Transfer() throws IOException {
+    String sourceUri = String.format("s3://%s/*", AWS_BUCKET);
+    String fileFormat = "CSV";
+    String fieldDelimiter = ",";
+    String skipLeadingRows = "1";
+    Map<String, Value> params = new HashMap<>();
+    params.put(
+        "destination_table_name_template", Value.newBuilder().setStringValue(tableName).build());
+    params.put("data_path", Value.newBuilder().setStringValue(sourceUri).build());
+    params.put("access_key_id", Value.newBuilder().setStringValue(AWS_ACCESS_KEY_ID).build());
+    params.put(
+        "secret_access_key", Value.newBuilder().setStringValue(AWS_SECRET_ACCESS_KEY).build());
+    params.put("file_format", Value.newBuilder().setStringValue(fileFormat).build());
+    params.put("field_delimiter", Value.newBuilder().setStringValue(fieldDelimiter).build());
+    params.put("skip_leading_rows", Value.newBuilder().setStringValue(skipLeadingRows).build());
+    TransferConfig transferConfig =
+        TransferConfig.newBuilder()
+            .setDestinationDatasetId(datasetName)
+            .setDisplayName(displayName)
+            .setDataSourceId("amazon_s3")
+            .setParams(Struct.newBuilder().putAllFields(params).build())
+            .setSchedule("every 24 hours")
+            .build();
+    CreateAmazonS3Transfer.createAmazonS3Transfer(PROJECT_ID, transferConfig);
+    String result = bout.toString();
+    name = result.substring(result.indexOf(":") + 1, result.length() - 1);
+    assertThat(result).contains("Amazon s3 transfer created successfully :");
+  }
+}
diff --git a/samples/snippets/src/test/java/com/example/bigquerydatatransfer/CreateCloudStorageTransferIT.java b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/CreateCloudStorageTransferIT.java
new file mode 100644
index 00000000..2121b2fb
--- /dev/null
+++ b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/CreateCloudStorageTransferIT.java
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquerydatatransfer;
+
+import static com.google.common.truth.Truth.assertThat;
+import static junit.framework.TestCase.assertNotNull;
+
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.DatasetInfo;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.cloud.bigquery.StandardTableDefinition;
+import com.google.cloud.bigquery.TableDefinition;
+import com.google.cloud.bigquery.TableId;
+import com.google.cloud.bigquery.TableInfo;
+import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
+import com.google.protobuf.Struct;
+import com.google.protobuf.Value;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class CreateCloudStorageTransferIT {
+
+  private static final Logger LOG = Logger.getLogger(CreateCloudStorageTransferIT.class.getName());
+  private static final String ID = UUID.randomUUID().toString().substring(0, 8);
+  private BigQuery bigquery;
+  private ByteArrayOutputStream bout;
+  private String name;
+  private String displayName;
+  private String datasetName;
+  private String tableName;
+  private PrintStream out;
+  private PrintStream originalPrintStream;
+
+  private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT");
+
+  private static String requireEnvVar(String varName) {
+    String value = System.getenv(varName);
+    assertNotNull(
+        "Environment variable " + varName + " is required to perform these tests.",
+        System.getenv(varName));
+    return value;
+  }
+
+  @BeforeClass
+  public static void checkRequirements() {
+    requireEnvVar("GOOGLE_CLOUD_PROJECT");
+  }
+
+  @Before
+  public void setUp() {
+    displayName = "CLOUD_STORAGE_CONFIG_TEST_" + ID;
+    datasetName = "CLOUD_STORAGE_DATASET_NAME_TEST_" + ID;
+    tableName = "CLOUD_STORAGE_TABLE_NAME_TEST_" + ID;
+    // create a temporary dataset
+    bigquery = BigQueryOptions.getDefaultInstance().getService();
+    bigquery.create(DatasetInfo.of(datasetName));
+    // create a temporary table
+    Schema schema =
+        Schema.of(
+            Field.of("name", StandardSQLTypeName.STRING),
+            Field.of("post_abbr", StandardSQLTypeName.STRING));
+    TableDefinition tableDefinition = StandardTableDefinition.of(schema);
+    TableInfo tableInfo = TableInfo.of(TableId.of(datasetName, tableName), tableDefinition);
+    bigquery.create(tableInfo);
+
+    bout = new ByteArrayOutputStream();
+    out = new PrintStream(bout);
+    originalPrintStream = System.out;
+    System.setOut(out);
+  }
+
+  @After
+  public void tearDown() throws IOException {
+    // Clean up
+    DeleteTransferConfig.deleteTransferConfig(name);
+    // delete a temporary table
+    bigquery.delete(TableId.of(datasetName, tableName));
+    // delete a temporary dataset
+    bigquery.delete(datasetName, BigQuery.DatasetDeleteOption.deleteContents());
+    // restores print statements in the original method
+    System.out.flush();
+    System.setOut(originalPrintStream);
+    LOG.log(Level.INFO, bout.toString());
+  }
+
+  @Test
+  public void testCreateCloudStorageTransfer() throws IOException {
+    String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv";
+    String fileFormat = "CSV";
+    String fieldDelimiter = ",";
+    String skipLeadingRows = "1";
+    Map<String, Value> params = new HashMap<>();
+    params.put(
+        "destination_table_name_template", Value.newBuilder().setStringValue(tableName).build());
+    params.put("data_path_template", Value.newBuilder().setStringValue(sourceUri).build());
+    params.put("write_disposition", Value.newBuilder().setStringValue("APPEND").build());
+    params.put("file_format", Value.newBuilder().setStringValue(fileFormat).build());
+    params.put("field_delimiter", Value.newBuilder().setStringValue(fieldDelimiter).build());
+    params.put("skip_leading_rows", Value.newBuilder().setStringValue(skipLeadingRows).build());
+    TransferConfig transferConfig =
+        TransferConfig.newBuilder()
+            .setDestinationDatasetId(datasetName)
+            .setDisplayName(displayName)
+            .setDataSourceId("google_cloud_storage")
+            .setParams(Struct.newBuilder().putAllFields(params).build())
+            .setSchedule("every 24 hours")
+            .build();
+    CreateCloudStorageTransfer.createCloudStorageTransfer(PROJECT_ID, transferConfig);
+    String result = bout.toString();
+    name = result.substring(result.indexOf(":") + 1, result.length() - 1);
+    assertThat(result).contains("Cloud storage transfer created successfully :");
+  }
+}
diff --git a/samples/snippets/src/test/java/com/example/bigquerydatatransfer/DeleteTransferConfigIT.java b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/DeleteTransferConfigIT.java
new file mode 100644
index 00000000..33ad1135
--- /dev/null
+++ b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/DeleteTransferConfigIT.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquerydatatransfer;
+
+import static com.google.common.truth.Truth.assertThat;
+import static junit.framework.TestCase.assertNotNull;
+
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.DatasetInfo;
+import com.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest;
+import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
+import com.google.cloud.bigquery.datatransfer.v1.ProjectName;
+import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
+import com.google.protobuf.Struct;
+import com.google.protobuf.Value;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class DeleteTransferConfigIT {
+
+  private static final Logger LOG = Logger.getLogger(DeleteTransferConfigIT.class.getName());
+  private BigQuery bigquery;
+  private ByteArrayOutputStream bout;
+  private String name;
+  private String displayName;
+  private String datasetName;
+  private PrintStream out;
+  private PrintStream originalPrintStream;
+
+  private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT");
+
+  private static String requireEnvVar(String varName) {
+    String value = System.getenv(varName);
+    assertNotNull(
+        "Environment variable " + varName + " is required to perform these tests.",
+        System.getenv(varName));
+    return value;
+  }
+
+  @BeforeClass
+  public static void checkRequirements() {
+    requireEnvVar("GOOGLE_CLOUD_PROJECT");
+  }
+
+  @Before
+  public void setUp() throws IOException {
+    bout = new ByteArrayOutputStream();
+    out = new PrintStream(bout);
+    originalPrintStream = System.out;
+    System.setOut(out);
+
+    displayName = "MY_SCHEDULE_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8);
+    datasetName = "MY_DATASET_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8);
+    // create a temporary dataset
+    bigquery = BigQueryOptions.getDefaultInstance().getService();
+    bigquery.create(DatasetInfo.of(datasetName));
+
+    // create a scheduled query
+    String query =
+        "SELECT CURRENT_TIMESTAMP() as current_time, @run_time as intended_run_time, "
+            + "@run_date as intended_run_date, 17 as some_integer";
+    String destinationTableName =
+        "MY_DESTINATION_TABLE_" + UUID.randomUUID().toString().substring(0, 8) + "_{run_date}";
+    Map<String, Value> params = new HashMap<>();
+    params.put("query", Value.newBuilder().setStringValue(query).build());
+    params.put(
+        "destination_table_name_template",
+        Value.newBuilder().setStringValue(destinationTableName).build());
+    params.put("write_disposition", Value.newBuilder().setStringValue("WRITE_TRUNCATE").build());
+    params.put("partitioning_field", Value.newBuilder().setStringValue("").build());
+    TransferConfig transferConfig =
+        TransferConfig.newBuilder()
+            .setDestinationDatasetId(datasetName)
+            .setDisplayName(displayName)
+            .setDataSourceId("scheduled_query")
+            .setParams(Struct.newBuilder().putAllFields(params).build())
+            .setSchedule("every 24 hours")
+            .build();
+    try (DataTransferServiceClient dataTransferServiceClient = DataTransferServiceClient.create()) {
+      ProjectName parent = ProjectName.of(PROJECT_ID);
+      CreateTransferConfigRequest request =
+          CreateTransferConfigRequest.newBuilder()
+              .setParent(parent.toString())
+              .setTransferConfig(transferConfig)
+              .build();
+      name = dataTransferServiceClient.createTransferConfig(request).getName();
+      System.out.println("Transfer config created successfully :" + name);
+    }
+  }
+
+  @After
+  public void tearDown() {
+    // delete a temporary dataset
+    bigquery.delete(datasetName, BigQuery.DatasetDeleteOption.deleteContents());
+    // restores print statements in the original method
+    System.out.flush();
+    System.setOut(originalPrintStream);
+    LOG.log(Level.INFO, bout.toString());
+  }
+
+  @Test
+  public void testDeleteTransferConfig() throws IOException {
+    // delete scheduled query that was just created
+    DeleteTransferConfig.deleteTransferConfig(name);
+    assertThat(bout.toString()).contains("Transfer config deleted successfully");
+  }
+}
diff --git a/samples/snippets/src/test/java/com/example/bigquerydatatransfer/DisableTransferConfigIT.java b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/DisableTransferConfigIT.java
new file mode 100644
index 00000000..5934d1e5
--- /dev/null
+++ b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/DisableTransferConfigIT.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquerydatatransfer;
+
+import static com.google.common.truth.Truth.assertThat;
+import static junit.framework.TestCase.assertNotNull;
+
+import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
+import com.google.protobuf.FieldMask;
+import com.google.protobuf.util.FieldMaskUtil;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class DisableTransferConfigIT {
+
+  private static final Logger LOG = Logger.getLogger(DisableTransferConfigIT.class.getName());
+  private ByteArrayOutputStream bout;
+  private PrintStream out;
+  private PrintStream originalPrintStream;
+
+  private static final String CONFIG_NAME = requireEnvVar("DTS_TRANSFER_CONFIG_NAME");
+
+  private static String requireEnvVar(String varName) {
+    String value = System.getenv(varName);
+    assertNotNull(
+        "Environment variable " + varName + " is required to perform these tests.",
+        System.getenv(varName));
+    return value;
+  }
+
+  @BeforeClass
+  public static void checkRequirements() {
+    requireEnvVar("DTS_TRANSFER_CONFIG_NAME");
+  }
+
+  @Before
+  public void setUp() {
+    bout = new ByteArrayOutputStream();
+    out = new PrintStream(bout);
+    originalPrintStream = System.out;
+    System.setOut(out);
+  }
+
+  @After
+  public void tearDown() {
+    // restores print statements in the original method
+    System.out.flush();
+    System.setOut(originalPrintStream);
+    LOG.log(Level.INFO, bout.toString());
+  }
+
+  @Test
+  public void testDisableTransferConfig() throws IOException {
+    TransferConfig transferConfig =
+        TransferConfig.newBuilder().setName(CONFIG_NAME).setDisabled(true).build();
+    FieldMask updateMask = FieldMaskUtil.fromString("disabled");
+    DisableTransferConfig.disableTransferConfig(transferConfig, updateMask);
+    assertThat(bout.toString()).contains("Transfer config disabled successfully");
+  }
+}
diff --git a/samples/snippets/src/test/java/com/example/bigquerydatatransfer/ReEnableTransferConfigIT.java b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/ReEnableTransferConfigIT.java
new file mode 100644
index 00000000..dcda4884
--- /dev/null
+++ b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/ReEnableTransferConfigIT.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquerydatatransfer;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assert.assertNotNull;
+
+import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
+import com.google.protobuf.FieldMask;
+import com.google.protobuf.util.FieldMaskUtil;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class ReEnableTransferConfigIT {
+
+  private static final Logger LOG = Logger.getLogger(ReEnableTransferConfigIT.class.getName());
+  private ByteArrayOutputStream bout;
+  private PrintStream out;
+  private PrintStream originalPrintStream;
+
+  private static final String CONFIG_NAME = requireEnvVar("DTS_TRANSFER_CONFIG_NAME");
+
+  private static String requireEnvVar(String varName) {
+    String value = System.getenv(varName);
+    assertNotNull(
+        "Environment variable " + varName + " is required to perform these tests.",
+        System.getenv(varName));
+    return value;
+  }
+
+  @BeforeClass
+  public static void checkRequirements() {
+    requireEnvVar("DTS_TRANSFER_CONFIG_NAME");
+  }
+
+  @Before
+  public void setUp() {
+    bout = new ByteArrayOutputStream();
+    out = new PrintStream(bout);
+    originalPrintStream = System.out;
+    System.setOut(out);
+  }
+
+  @After
+  public void tearDown() {
+    // restores print statements in the original method
+    System.out.flush();
+    System.setOut(originalPrintStream);
+    LOG.log(Level.INFO, bout.toString());
+  }
+
+  @Test
+  public void testReEnableTransferConfig() throws IOException {
+    TransferConfig transferConfig =
+        TransferConfig.newBuilder().setName(CONFIG_NAME).setDisabled(false).build();
+    FieldMask updateMask = FieldMaskUtil.fromString("disabled");
+    ReEnableTransferConfig.reEnableTransferConfig(transferConfig, updateMask);
+    assertThat(bout.toString()).contains("Transfer config re-enabled successfully");
+  }
+}
diff --git a/samples/snippets/src/test/java/com/example/bigquerydatatransfer/ScheduleBackFillIT.java b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/ScheduleBackFillIT.java
new file mode 100644
index 00000000..aaf0403a
--- /dev/null
+++ b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/ScheduleBackFillIT.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquerydatatransfer;
+
+import static com.google.common.truth.Truth.assertThat;
+import static junit.framework.TestCase.assertNotNull;
+
+import com.google.cloud.bigquery.datatransfer.v1.ScheduleOptions;
+import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
+import com.google.common.collect.ImmutableList;
+import com.google.protobuf.FieldMask;
+import com.google.protobuf.Timestamp;
+import com.google.protobuf.util.FieldMaskUtil;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.threeten.bp.Clock;
+import org.threeten.bp.Instant;
+import org.threeten.bp.temporal.ChronoUnit;
+
+public class ScheduleBackFillIT {
+
+  private static final Logger LOG = Logger.getLogger(ScheduleBackFillIT.class.getName());
+  private ByteArrayOutputStream bout;
+  private PrintStream out;
+  private PrintStream originalPrintStream;
+
+  private static final String CONFIG_NAME = requireEnvVar("DTS_TRANSFER_CONFIG_NAME");
+
+  private static String requireEnvVar(String varName) {
+    String value = System.getenv(varName);
+    assertNotNull(
+        "Environment variable " + varName + " is required to perform these tests.",
+        System.getenv(varName));
+    return value;
+  }
+
+  @BeforeClass
+  public static void checkRequirements() {
+    requireEnvVar("DTS_TRANSFER_CONFIG_NAME");
+  }
+
+  @Before
+  public void setUp() {
+    bout = new ByteArrayOutputStream();
+    out = new PrintStream(bout);
+    originalPrintStream = System.out;
+    System.setOut(out);
+  }
+
+  @After
+  public void tearDown() {
+    // restores print statements in the original method
+    System.out.flush();
+    System.setOut(originalPrintStream);
+    LOG.log(Level.INFO, bout.toString());
+  }
+
+  @Test
+  public void testScheduleBackFill() throws IOException {
+    Clock clock = Clock.systemDefaultZone();
+    Instant instant = clock.instant();
+    Timestamp startDate =
+        Timestamp.newBuilder()
+            .setSeconds(instant.getEpochSecond())
+            .setNanos(instant.getNano())
+            .build();
+    Timestamp endDate =
+        Timestamp.newBuilder()
+            .setSeconds(instant.plus(10, ChronoUnit.DAYS).getEpochSecond())
+            .setNanos(instant.plus(10, ChronoUnit.DAYS).getNano())
+            .build();
+    TransferConfig transferConfig =
+        TransferConfig.newBuilder()
+            .setName(CONFIG_NAME)
+            .setScheduleOptions(
+                ScheduleOptions.newBuilder().setStartTime(startDate).setEndTime(endDate).build())
+            .build();
+    FieldMask updateMask = FieldMaskUtil.fromStringList(ImmutableList.of("start_time", "end_time"));
+    ScheduleBackFill.scheduleBackFill(transferConfig, updateMask);
+    assertThat(bout.toString()).contains("Schedule backfill updated successfully :");
+  }
+}
diff --git a/samples/snippets/src/test/java/com/example/bigquerydatatransfer/UpdateCredentialsIT.java b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/UpdateCredentialsIT.java
new file mode 100644
index 00000000..a4ab53ca
--- /dev/null
+++ b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/UpdateCredentialsIT.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquerydatatransfer;
+
+import static com.google.common.truth.Truth.assertThat;
+import static junit.framework.TestCase.assertNotNull;
+
+import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
+import com.google.protobuf.FieldMask;
+import com.google.protobuf.util.FieldMaskUtil;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class UpdateCredentialsIT {
+
+  private static final Logger LOG = Logger.getLogger(UpdateCredentialsIT.class.getName());
+  private ByteArrayOutputStream bout;
+  private PrintStream out;
+  private PrintStream originalPrintStream;
+
+  private static final String CONFIG_NAME = requireEnvVar("DTS_TRANSFER_CONFIG_NAME");
+  private static final String SERVICE_ACCOUNT = requireEnvVar("DTS_UPDATED_SERVICE_ACCOUNT");
+
+  private static String requireEnvVar(String varName) {
+    String value = System.getenv(varName);
+    assertNotNull(
+        "Environment variable " + varName + " is required to perform these tests.",
+        System.getenv(varName));
+    return value;
+  }
+
+  @BeforeClass
+  public static void checkRequirements() {
+    requireEnvVar("DTS_TRANSFER_CONFIG_NAME");
+    requireEnvVar("DTS_UPDATED_SERVICE_ACCOUNT");
+  }
+
+  @Before
+  public void setUp() {
+    bout = new ByteArrayOutputStream();
+    out = new PrintStream(bout);
+    originalPrintStream = System.out;
+    System.setOut(out);
+  }
+
+  @After
+  public void tearDown() {
+    // restores print statements in the original method
+    System.out.flush();
+    System.setOut(originalPrintStream);
+    LOG.log(Level.INFO, bout.toString());
+  }
+
+  @Test
+  public void testUpdateCredentials() throws IOException {
+    TransferConfig transferConfig = TransferConfig.newBuilder().setName(CONFIG_NAME).build();
+    FieldMask updateMask = FieldMaskUtil.fromString("service_account_name");
+    UpdateCredentials.updateCredentials(transferConfig, SERVICE_ACCOUNT, updateMask);
+    assertThat(bout.toString()).contains("Credentials updated successfully");
+  }
+}
diff --git a/samples/snippets/src/test/java/com/example/bigquerydatatransfer/UpdateTransferConfigIT.java b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/UpdateTransferConfigIT.java
index bbba090b..4da497a9 100644
--- a/samples/snippets/src/test/java/com/example/bigquerydatatransfer/UpdateTransferConfigIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquerydatatransfer/UpdateTransferConfigIT.java
@@ -19,6 +19,9 @@
 import static com.google.common.truth.Truth.assertThat;
 import static junit.framework.TestCase.assertNotNull;
 
+import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
+import com.google.protobuf.FieldMask;
+import com.google.protobuf.util.FieldMaskUtil;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
@@ -31,7 +34,7 @@ public class UpdateTransferConfigIT {
-  private static final Logger LOG = Logger.getLogger(DeleteScheduledQueryIT.class.getName());
+  private static final Logger LOG = Logger.getLogger(UpdateTransferConfigIT.class.getName());
   private ByteArrayOutputStream bout;
   private PrintStream out;
   private PrintStream originalPrintStream;
@@ -69,7 +72,13 @@ public void tearDown() {
 
   @Test
   public void testUpdateTransferConfig() throws IOException {
-    UpdateTransferConfig.updateTransferConfig(CONFIG_NAME);
+    TransferConfig transferConfig =
+        TransferConfig.newBuilder()
+            .setName(CONFIG_NAME)
+            .setDisplayName("UPDATED_DISPLAY_NAME")
+            .build();
+    FieldMask updateMask = FieldMaskUtil.fromString("display_name");
+    UpdateTransferConfig.updateTransferConfig(transferConfig, updateMask);
     assertThat(bout.toString()).contains("Transfer config updated successfully");
   }
 }
diff --git a/synth.metadata b/synth.metadata
index d3650184..135e4e6b 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -11,7 +11,7 @@
       "git": {
         "name": ".",
         "remote": "https://github.com/googleapis/java-bigquerydatatransfer.git",
-        "sha": "08f0a9758c17e016080f4729bfad34b447d7cae7"
+        "sha": "d9c7a5c6b57b3a3564457d6d68d645bc20ef4b5b"
       }
     },
     {
@@ -26,7 +26,7 @@
       "git": {
         "name": "synthtool",
         "remote": "https://github.com/googleapis/synthtool.git",
-        "sha": "80003a3de2d8a75f5b47cb2e77e018f7f0f776cc"
+        "sha": "5a506ec8765cc04f7e29f888b8e9b257d9a7ae11"
       }
     }
   ],
@@ -47,10 +47,14 @@
     ".github/ISSUE_TEMPLATE/feature_request.md",
     ".github/ISSUE_TEMPLATE/support_request.md",
     ".github/PULL_REQUEST_TEMPLATE.md",
+    ".github/readme/synth.py",
     ".github/release-please.yml",
+    ".github/snippet-bot.yml",
     ".github/trusted-contribution.yml",
+    ".github/workflows/approve-readme.yaml",
     ".github/workflows/auto-release.yaml",
     ".github/workflows/ci.yaml",
+    ".github/workflows/formatting.yaml",
     ".github/workflows/samples.yaml",
     ".kokoro/build.bat",
     ".kokoro/coerce_logs.sh",
@@ -58,6 +62,7 @@
     ".kokoro/common.sh",
     ".kokoro/continuous/common.cfg",
    ".kokoro/continuous/java8.cfg",
+    ".kokoro/continuous/readme.cfg",
     ".kokoro/dependencies.sh",
     ".kokoro/linkage-monitor.sh",
     ".kokoro/nightly/common.cfg",
@@ -79,6 +84,7 @@
     ".kokoro/presubmit/java8.cfg",
     ".kokoro/presubmit/linkage-monitor.cfg",
     ".kokoro/presubmit/lint.cfg",
+    ".kokoro/readme.sh",
     ".kokoro/release/bump_snapshot.cfg",
     ".kokoro/release/common.cfg",
     ".kokoro/release/common.sh",
@@ -96,7 +102,6 @@
     "CODE_OF_CONDUCT.md",
     "CONTRIBUTING.md",
     "LICENSE",
-    "README.md",
     "codecov.yaml",
     "google-cloud-bigquerydatatransfer/src/main/java/com/google/cloud/bigquery/datatransfer/v1/DataTransferServiceClient.java",
     "google-cloud-bigquerydatatransfer/src/main/java/com/google/cloud/bigquery/datatransfer/v1/DataTransferServiceSettings.java",
diff --git a/versions.txt b/versions.txt
index eb7e7180..07f5bc03 100644
--- a/versions.txt
+++ b/versions.txt
@@ -1,6 +1,6 @@
 # Format:
 # module:released-version:current-version
-proto-google-cloud-bigquerydatatransfer-v1:1.0.12:1.0.12
-grpc-google-cloud-bigquerydatatransfer-v1:1.0.12:1.0.12
-google-cloud-bigquerydatatransfer:1.0.12:1.0.12
+proto-google-cloud-bigquerydatatransfer-v1:1.0.13:1.0.13
+grpc-google-cloud-bigquerydatatransfer-v1:1.0.13:1.0.13
+google-cloud-bigquerydatatransfer:1.0.13:1.0.13