From 348ac56128f7af782d51a72efd001d3c45dd771a Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 29 Mar 2021 14:58:01 -0700 Subject: [PATCH 01/17] chore: updates to pycqa/flake8, coverage (#127) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/7b91e2c4-9b0c-4685-ad6f-3280b28a36ba/targets - [ ] To automatically regenerate this PR, check this box. (May take up to 24 hours.) Source-Link: https://github.com/googleapis/synthtool/commit/86ed43d4f56e6404d068e62e497029018879c771 Source-Link: https://github.com/googleapis/synthtool/commit/f5c5904fb0c6aa3b3730eadf4e5a4485afc65726 --- .pre-commit-config.yaml | 2 +- noxfile.py | 22 ++++++++++++++-------- setup.py | 2 +- synth.metadata | 4 ++-- testing/constraints-3.6.txt | 2 +- 5 files changed, 19 insertions(+), 13 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a9024b15..32302e48 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,6 +12,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 + rev: 3.9.0 hooks: - id: flake8 diff --git a/noxfile.py b/noxfile.py index 9249c5b9..8e9b9413 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,8 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + # 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ "unit", @@ -84,13 +87,15 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. 
- session.install("asyncmock", "pytest-asyncio") - session.install( - "mock", "pytest", "pytest-cov", + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) - session.install("-e", ".") + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -117,6 +122,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -141,10 +149,8 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", - ) - session.install("-e", ".") + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. 
if system_test_exists: diff --git a/setup.py b/setup.py index 79211a5c..d0af548c 100644 --- a/setup.py +++ b/setup.py @@ -28,7 +28,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", "libcst >= 0.2.5", "proto-plus >= 1.4.0", diff --git a/synth.metadata b/synth.metadata index 843b2ee1..29f0b919 100644 --- a/synth.metadata +++ b/synth.metadata @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "79c8dd7ee768292f933012d3a69a5b4676404cda" + "sha": "86ed43d4f56e6404d068e62e497029018879c771" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "79c8dd7ee768292f933012d3a69a5b4676404cda" + "sha": "86ed43d4f56e6404d068e62e497029018879c771" } } ], diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 421f979e..a732223e 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.22.0 +google-api-core==1.22.2 grpc-google-iam-v1==0.12.3 libcst==0.2.5 proto-plus==1.4.0 \ No newline at end of file From 28744140c32f778b1580d93a771c6f55e6457340 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 31 Mar 2021 12:26:38 -0700 Subject: [PATCH 02/17] build(python): update docfx job to use new plugin (#134) * changes without context * build(python): update docfx job to use new plugin Source-Author: Dan Lee <71398022+dandhlee@users.noreply.github.com> Source-Date: Tue Mar 30 19:36:37 2021 -0400 Source-Repo: googleapis/synthtool Source-Sha: 4501974ad08b5d693311457e2ea4ce845676e329 
Source-Link: https://github.com/googleapis/synthtool/commit/4501974ad08b5d693311457e2ea4ce845676e329 --- google/cloud/datacatalog_v1beta1/__init__.py | 4 ++-- noxfile.py | 4 +--- synth.metadata | 6 +++--- 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index 8bc01583..be0bdd8e 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -140,7 +140,7 @@ "ListTaxonomiesResponse", "LookupEntryRequest", "PolicyTag", - "PolicyTagManagerSerializationClient", + "PolicyTagManagerClient", "RenameTagTemplateFieldRequest", "Schema", "SearchCatalogRequest", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerClient", + "PolicyTagManagerSerializationClient", ) diff --git a/noxfile.py b/noxfile.py index 8e9b9413..0eb433e2 100644 --- a/noxfile.py +++ b/noxfile.py @@ -211,9 +211,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. 
- # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/synth.metadata b/synth.metadata index 29f0b919..d3b001b4 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-datacatalog.git", - "sha": "0c69bc2fbae593f62c543c5a15dbe810467b7510" + "sha": "348ac56128f7af782d51a72efd001d3c45dd771a" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "86ed43d4f56e6404d068e62e497029018879c771" + "sha": "4501974ad08b5d693311457e2ea4ce845676e329" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "86ed43d4f56e6404d068e62e497029018879c771" + "sha": "4501974ad08b5d693311457e2ea4ce845676e329" } } ], From b2fb4858966809914c915e11435d29e0fa4442e8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 1 Apr 2021 19:28:10 +0200 Subject: [PATCH 03/17] chore(deps): update dependency google-cloud-datacatalog to v3.1.1 (#130) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index e6fae367..3a507e77 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.1.0 +google-cloud-datacatalog==3.1.1 From 875c645ebfc01d1c25819a89486cec197d08c563 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 7 Apr 2021 09:26:04 -0700 Subject: [PATCH 
04/17] chore: add license headers (#136) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/84f8868d-ac4d-4151-a388-e762b5d35837/targets - [ ] To automatically regenerate this PR, check this box. (May take up to 24 hours.) Source-Link: https://github.com/googleapis/synthtool/commit/5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc --- .pre-commit-config.yaml | 14 ++++++++++++++ docs/conf.py | 13 +++++++++++++ google/cloud/datacatalog_v1beta1/__init__.py | 4 ++-- synth.metadata | 6 +++--- 4 files changed, 32 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 32302e48..8912e9b5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,3 +1,17 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: diff --git a/docs/conf.py b/docs/conf.py index dba56c04..d225b87e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,17 @@ # -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # # google-cloud-datacatalog documentation build configuration file # diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index be0bdd8e..16534418 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -103,7 +103,6 @@ "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", "CreateTaxonomyRequest", - "DataCatalogClient", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeletePolicyTagRequest", @@ -141,6 +140,7 @@ "LookupEntryRequest", "PolicyTag", "PolicyTagManagerClient", + "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", "Schema", "SearchCatalogRequest", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerSerializationClient", + "DataCatalogClient", ) diff --git a/synth.metadata b/synth.metadata index d3b001b4..257b4ed7 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { 
"name": ".", "remote": "https://github.com/googleapis/python-datacatalog.git", - "sha": "348ac56128f7af782d51a72efd001d3c45dd771a" + "sha": "b2fb4858966809914c915e11435d29e0fa4442e8" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4501974ad08b5d693311457e2ea4ce845676e329" + "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4501974ad08b5d693311457e2ea4ce845676e329" + "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" } } ], From 1614c6c99ec70355e7a9caa93d4e866f64b1df50 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 13 Apr 2021 07:55:50 -0700 Subject: [PATCH 05/17] chore: add constraints file check for python samples (#137) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
* chore: add constraints file check for python samples This is the sibling PR to https://github.com/GoogleCloudPlatform/python-docs-samples/pull/5611 and this is the issue opened for it https://github.com/GoogleCloudPlatform/python-docs-samples/issues/5549 If you look at the files in [this example repo](https://github.com/leahecole/testrepo-githubapp/pull/31/files), you'll see that renovate successfully opened a PR on three constraints files in `samples` directories and subdirectories, and properly ignored `constraints` files at the root level cc @tswast TODO: - [x] update renovate to check for samples/constraints.txt dependency updates - [x] run lint locally to double check that I'm not introducing lint error Source-Author: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Fri Apr 9 22:50:04 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 0a071b3460344886297a304253bf924aa68ddb7e Source-Link: https://github.com/googleapis/synthtool/commit/0a071b3460344886297a304253bf924aa68ddb7e --- .github/header-checker-lint.yml | 2 +- google/cloud/datacatalog_v1beta1/__init__.py | 4 ++-- renovate.json | 5 ++++- samples/snippets/noxfile.py | 10 ++++++++-- synth.metadata | 6 +++--- 5 files changed, 18 insertions(+), 9 deletions(-) diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml index fc281c05..6fe78aa7 100644 --- a/.github/header-checker-lint.yml +++ b/.github/header-checker-lint.yml @@ -1,6 +1,6 @@ {"allowedCopyrightHolders": ["Google LLC"], "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], 
"sourceFileExtensions": [ "ts", "js", diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index 16534418..be0bdd8e 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -103,6 +103,7 @@ "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", "CreateTaxonomyRequest", + "DataCatalogClient", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeletePolicyTagRequest", @@ -140,7 +141,6 @@ "LookupEntryRequest", "PolicyTag", "PolicyTagManagerClient", - "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", "Schema", "SearchCatalogRequest", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "DataCatalogClient", + "PolicyTagManagerSerializationClient", ) diff --git a/renovate.json b/renovate.json index f08bc22c..c0489556 100644 --- a/renovate.json +++ b/renovate.json @@ -2,5 +2,8 @@ "extends": [ "config:base", ":preserveSemverRanges" ], - "ignorePaths": [".pre-commit-config.yaml"] + "ignorePaths": [".pre-commit-config.yaml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 97bf7da8..956cdf4f 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -172,10 +172,16 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if 
os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) diff --git a/synth.metadata b/synth.metadata index 257b4ed7..8328c7d0 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-datacatalog.git", - "sha": "b2fb4858966809914c915e11435d29e0fa4442e8" + "sha": "875c645ebfc01d1c25819a89486cec197d08c563" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" + "sha": "0a071b3460344886297a304253bf924aa68ddb7e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc" + "sha": "0a071b3460344886297a304253bf924aa68ddb7e" } } ], From 8c157542045cfdebe31876cfc2448590b48775c2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 13 Apr 2021 17:06:04 +0200 Subject: [PATCH 06/17] chore(deps): update dependency pytest to v6.2.3 (#138) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [pytest](https://docs.pytest.org/en/latest/) ([source](https://togithub.com/pytest-dev/pytest), [changelog](https://docs.pytest.org/en/stable/changelog.html)) | 
`==6.0.1` -> `==6.2.3` | [![age](https://badges.renovateapi.com/packages/pypi/pytest/6.2.3/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/pytest/6.2.3/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/pytest/6.2.3/compatibility-slim/6.0.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/pytest/6.2.3/confidence-slim/6.0.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
pytest-dev/pytest ### [`v6.2.3`](https://togithub.com/pytest-dev/pytest/releases/6.2.3) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.2.2...6.2.3) # pytest 6.2.3 (2021-04-03) ## Bug Fixes - [#​8414](https://togithub.com/pytest-dev/pytest/issues/8414): pytest used to create directories under `/tmp` with world-readable permissions. This means that any user in the system was able to read information written by tests in temporary directories (such as those created by the `tmp_path`/`tmpdir` fixture). Now the directories are created with private permissions. pytest used silenty use a pre-existing `/tmp/pytest-of-` directory, even if owned by another user. This means another user could pre-create such a directory and gain control of another user\\'s temporary directory. Now such a condition results in an error. ### [`v6.2.2`](https://togithub.com/pytest-dev/pytest/releases/6.2.2) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.2.1...6.2.2) # pytest 6.2.2 (2021-01-25) ## Bug Fixes - [#​8152](https://togithub.com/pytest-dev/pytest/issues/8152): Fixed "(<Skipped instance>)" being shown as a skip reason in the verbose test summary line when the reason is empty. - [#​8249](https://togithub.com/pytest-dev/pytest/issues/8249): Fix the `faulthandler` plugin for occasions when running with `twisted.logger` and using `pytest --capture=no`. 
### [`v6.2.1`](https://togithub.com/pytest-dev/pytest/releases/6.2.1) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.2.0...6.2.1) # pytest 6.2.1 (2020-12-15) ## Bug Fixes - [#​7678](https://togithub.com/pytest-dev/pytest/issues/7678): Fixed bug where `ImportPathMismatchError` would be raised for files compiled in the host and loaded later from an UNC mounted path (Windows). - [#​8132](https://togithub.com/pytest-dev/pytest/issues/8132): Fixed regression in `approx`: in 6.2.0 `approx` no longer raises `TypeError` when dealing with non-numeric types, falling back to normal comparison. Before 6.2.0, array types like tf.DeviceArray fell through to the scalar case, and happened to compare correctly to a scalar if they had only one element. After 6.2.0, these types began failing, because they inherited neither from standard Python number hierarchy nor from `numpy.ndarray`. `approx` now converts arguments to `numpy.ndarray` if they expose the array protocol and are not scalars. This treats array-like objects like numpy arrays, regardless of size. ### [`v6.2.0`](https://togithub.com/pytest-dev/pytest/releases/6.2.0) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.1.2...6.2.0) # pytest 6.2.0 (2020-12-12) ## Breaking Changes - [#​7808](https://togithub.com/pytest-dev/pytest/issues/7808): pytest now supports python3.6+ only. 
## Deprecations - [#​7469](https://togithub.com/pytest-dev/pytest/issues/7469): Directly constructing/calling the following classes/functions is now deprecated: - `_pytest.cacheprovider.Cache` - `_pytest.cacheprovider.Cache.for_config()` - `_pytest.cacheprovider.Cache.clear_cache()` - `_pytest.cacheprovider.Cache.cache_dir_from_config()` - `_pytest.capture.CaptureFixture` - `_pytest.fixtures.FixtureRequest` - `_pytest.fixtures.SubRequest` - `_pytest.logging.LogCaptureFixture` - `_pytest.pytester.Pytester` - `_pytest.pytester.Testdir` - `_pytest.recwarn.WarningsRecorder` - `_pytest.recwarn.WarningsChecker` - `_pytest.tmpdir.TempPathFactory` - `_pytest.tmpdir.TempdirFactory` These have always been considered private, but now issue a deprecation warning, which may become a hard error in pytest 7.0.0. - [#​7530](https://togithub.com/pytest-dev/pytest/issues/7530): The `--strict` command-line option has been deprecated, use `--strict-markers` instead. We have plans to maybe in the future to reintroduce `--strict` and make it an encompassing flag for all strictness related options (`--strict-markers` and `--strict-config` at the moment, more might be introduced in the future). - [#​7988](https://togithub.com/pytest-dev/pytest/issues/7988): The `@pytest.yield_fixture` decorator/function is now deprecated. Use pytest.fixture instead. `yield_fixture` has been an alias for `fixture` for a very long time, so can be search/replaced safely. ## Features - [#​5299](https://togithub.com/pytest-dev/pytest/issues/5299): pytest now warns about unraisable exceptions and unhandled thread exceptions that occur in tests on Python>=3.8. See unraisable for more information. 
- [#​7425](https://togithub.com/pytest-dev/pytest/issues/7425): New pytester fixture, which is identical to testdir but its methods return pathlib.Path when appropriate instead of `py.path.local`. This is part of the movement to use pathlib.Path objects internally, in order to remove the dependency to `py` in the future. Internally, the old Testdir <\_pytest.pytester.Testdir> is now a thin wrapper around Pytester <\_pytest.pytester.Pytester>, preserving the old interface. - [#​7695](https://togithub.com/pytest-dev/pytest/issues/7695): A new hook was added, pytest_markeval_namespace which should return a dictionary. This dictionary will be used to augment the "global" variables available to evaluate skipif/xfail/xpass markers. Pseudo example `conftest.py`: ```{.sourceCode .python} def pytest_markeval_namespace(): return {"color": "red"} ``` `test_func.py`: ```{.sourceCode .python} @​pytest.mark.skipif("color == 'blue'", reason="Color is not red") def test_func(): assert False ``` - [#​8006](https://togithub.com/pytest-dev/pytest/issues/8006): It is now possible to construct a ~pytest.MonkeyPatch object directly as `pytest.MonkeyPatch()`, in cases when the monkeypatch fixture cannot be used. Previously some users imported it from the private \_pytest.monkeypatch.MonkeyPatch namespace. Additionally, MonkeyPatch.context <pytest.MonkeyPatch.context> is now a classmethod, and can be used as `with MonkeyPatch.context() as mp: ...`. This is the recommended way to use `MonkeyPatch` directly, since unlike the `monkeypatch` fixture, an instance created directly is not `undo()`-ed automatically. 
## Improvements - [#​1265](https://togithub.com/pytest-dev/pytest/issues/1265): Added an `__str__` implementation to the ~pytest.pytester.LineMatcher class which is returned from `pytester.run_pytest().stdout` and similar. It returns the entire output, like the existing `str()` method. - [#​2044](https://togithub.com/pytest-dev/pytest/issues/2044): Verbose mode now shows the reason that a test was skipped in the test's terminal line after the "SKIPPED", "XFAIL" or "XPASS". - [#​7469](https://togithub.com/pytest-dev/pytest/issues/7469) The types of builtin pytest fixtures are now exported so they may be used in type annotations of test functions. The newly-exported types are: - `pytest.FixtureRequest` for the request fixture. - `pytest.Cache` for the cache fixture. - `pytest.CaptureFixture[str]` for the capfd and capsys fixtures. - `pytest.CaptureFixture[bytes]` for the capfdbinary and capsysbinary fixtures. - `pytest.LogCaptureFixture` for the caplog fixture. - `pytest.Pytester` for the pytester fixture. - `pytest.Testdir` for the testdir fixture. - `pytest.TempdirFactory` for the tmpdir_factory fixture. - `pytest.TempPathFactory` for the tmp_path_factory fixture. - `pytest.MonkeyPatch` for the monkeypatch fixture. - `pytest.WarningsRecorder` for the recwarn fixture. Constructing them is not supported (except for MonkeyPatch); they are only meant for use in type annotations. Doing so will emit a deprecation warning, and may become a hard-error in pytest 7.0. Subclassing them is also not supported. This is not currently enforced at runtime, but is detected by type-checkers such as mypy. 
- [#​7527](https://togithub.com/pytest-dev/pytest/issues/7527): When a comparison between namedtuple <collections.namedtuple> instances of the same type fails, pytest now shows the differing field names (possibly nested) instead of their indexes. - [#​7615](https://togithub.com/pytest-dev/pytest/issues/7615): Node.warn <\_pytest.nodes.Node.warn> now permits any subclass of Warning, not just PytestWarning <pytest.PytestWarning>. - [#​7701](https://togithub.com/pytest-dev/pytest/issues/7701): Improved reporting when using `--collected-only`. It will now show the number of collected tests in the summary stats. - [#​7710](https://togithub.com/pytest-dev/pytest/issues/7710): Use strict equality comparison for non-numeric types in pytest.approx instead of raising TypeError. This was the undocumented behavior before 3.7, but is now officially a supported feature. - [#​7938](https://togithub.com/pytest-dev/pytest/issues/7938): New `--sw-skip` argument which is a shorthand for `--stepwise-skip`. - [#​8023](https://togithub.com/pytest-dev/pytest/issues/8023): Added `'node_modules'` to default value for norecursedirs. - [#​8032](https://togithub.com/pytest-dev/pytest/issues/8032): doClassCleanups <unittest.TestCase.doClassCleanups> (introduced in unittest in Python and 3.8) is now called appropriately. ## Bug Fixes - [#​4824](https://togithub.com/pytest-dev/pytest/issues/4824): Fixed quadratic behavior and improved performance of collection of items using autouse fixtures and xunit fixtures. 
- [#​7758](https://togithub.com/pytest-dev/pytest/issues/7758): Fixed an issue where some files in packages are getting lost from `--lf` even though they contain tests that failed. Regressed in pytest 5.4.0. - [#​7911](https://togithub.com/pytest-dev/pytest/issues/7911): Directories created by by tmp_path and tmpdir are now considered stale after 3 days without modification (previous value was 3 hours) to avoid deleting directories still in use in long running test suites. - [#​7913](https://togithub.com/pytest-dev/pytest/issues/7913): Fixed a crash or hang in pytester.spawn <\_pytest.pytester.Pytester.spawn> when the readline module is involved. - [#​7951](https://togithub.com/pytest-dev/pytest/issues/7951): Fixed handling of recursive symlinks when collecting tests. - [#​7981](https://togithub.com/pytest-dev/pytest/issues/7981): Fixed symlinked directories not being followed during collection. Regressed in pytest 6.1.0. - [#​8016](https://togithub.com/pytest-dev/pytest/issues/8016): Fixed only one doctest being collected when using `pytest --doctest-modules path/to/an/__init__.py`. ## Improved Documentation - [#​7429](https://togithub.com/pytest-dev/pytest/issues/7429): Add more information and use cases about skipping doctests. - [#​7780](https://togithub.com/pytest-dev/pytest/issues/7780): Classes which should not be inherited from are now marked `final class` in the API reference. - [#​7872](https://togithub.com/pytest-dev/pytest/issues/7872): `_pytest.config.argparsing.Parser.addini()` accepts explicit `None` and `"string"`. 
- [#​7878](https://togithub.com/pytest-dev/pytest/issues/7878): In pull request section, ask to commit after editing changelog and authors file. ## Trivial/Internal Changes - [#​7802](https://togithub.com/pytest-dev/pytest/issues/7802): The `attrs` dependency requirement is now >=19.2.0 instead of >=17.4.0. - [#​8014](https://togithub.com/pytest-dev/pytest/issues/8014): .pyc files created by pytest's assertion rewriting now conform to the newer PEP-552 format on Python>=3.7. (These files are internal and only interpreted by pytest itself.) ### [`v6.1.2`](https://togithub.com/pytest-dev/pytest/releases/6.1.2) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.1.1...6.1.2) # pytest 6.1.2 (2020-10-28) ## Bug Fixes - [#​7758](https://togithub.com/pytest-dev/pytest/issues/7758): Fixed an issue where some files in packages are getting lost from `--lf` even though they contain tests that failed. Regressed in pytest 5.4.0. - [#​7911](https://togithub.com/pytest-dev/pytest/issues/7911): Directories created by tmpdir are now considered stale after 3 days without modification (previous value was 3 hours) to avoid deleting directories still in use in long running test suites. ## Improved Documentation - [#​7815](https://togithub.com/pytest-dev/pytest/issues/7815): Improve deprecation warning message for `pytest._fillfuncargs()`. 
### [`v6.1.1`](https://togithub.com/pytest-dev/pytest/releases/6.1.1) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.1.0...6.1.1) # pytest 6.1.1 (2020-10-03) ## Bug Fixes - [#​7807](https://togithub.com/pytest-dev/pytest/issues/7807): Fixed regression in pytest 6.1.0 causing incorrect rootdir to be determined in some non-trivial cases where parent directories have config files as well. - [#​7814](https://togithub.com/pytest-dev/pytest/issues/7814): Fixed crash in header reporting when testpaths is used and contains absolute paths (regression in 6.1.0). ### [`v6.1.0`](https://togithub.com/pytest-dev/pytest/releases/6.1.0) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.0.2...6.1.0) # pytest 6.1.0 (2020-09-26) ## Breaking Changes - [#​5585](https://togithub.com/pytest-dev/pytest/issues/5585): As per our policy, the following features which have been deprecated in the 5.X series are now removed: - The `funcargnames` read-only property of `FixtureRequest`, `Metafunc`, and `Function` classes. Use `fixturenames` attribute. - `@pytest.fixture` no longer supports positional arguments, pass all arguments by keyword instead. - Direct construction of `Node` subclasses now raise an error, use `from_parent` instead. - The default value for `junit_family` has changed to `xunit2`. If you require the old format, add `junit_family=xunit1` to your configuration file. - The `TerminalReporter` no longer has a `writer` attribute. Plugin authors may use the public functions of the `TerminalReporter` instead of accessing the `TerminalWriter` object directly. - The `--result-log` option has been removed. 
Users are recommended to use the [pytest-reportlog](https://togithub.com/pytest-dev/pytest-reportlog) plugin instead. For more information consult [Deprecations and Removals](https://docs.pytest.org/en/stable/deprecations.html) in the docs. ## Deprecations - [#​6981](https://togithub.com/pytest-dev/pytest/issues/6981): The `pytest.collect` module is deprecated: all its names can be imported from `pytest` directly. - [#​7097](https://togithub.com/pytest-dev/pytest/issues/7097): The `pytest._fillfuncargs` function is deprecated. This function was kept for backward compatibility with an older plugin. Its functionality is not meant to be used directly, but if you must replace it, use function.\_request.\_fillfixtures() instead, though note this is not a public API and may break in the future. - [#​7210](https://togithub.com/pytest-dev/pytest/issues/7210): The special `-k '-expr'` syntax to `-k` is deprecated. Use `-k 'not expr'` instead. The special `-k 'expr:'` syntax to `-k` is deprecated. Please open an issue if you use this and want a replacement. - [#​7255](https://togithub.com/pytest-dev/pytest/issues/7255): The pytest_warning_captured <\_pytest.hookspec.pytest_warning_captured> hook is deprecated in favor of pytest_warning_recorded <\_pytest.hookspec.pytest_warning_recorded>, and will be removed in a future version. - [#​7648](https://togithub.com/pytest-dev/pytest/issues/7648): The `gethookproxy()` and `isinitpath()` methods of `FSCollector` and `Package` are deprecated; use `self.session.gethookproxy()` and `self.session.isinitpath()` instead. This should work on all pytest versions. 
## Features - [#​7667](https://togithub.com/pytest-dev/pytest/issues/7667): New `--durations-min` command-line flag controls the minimal duration for inclusion in the slowest list of tests shown by `--durations`. Previously this was hard-coded to `0.005s`. ## Improvements - [#​6681](https://togithub.com/pytest-dev/pytest/issues/6681): Internal pytest warnings issued during the early stages of initialization are now properly handled and can be filtered through filterwarnings or `--pythonwarnings/-W`. This also fixes a number of long standing issues: [#​2891](https://togithub.com/pytest-dev/pytest/issues/2891), [#​7620](https://togithub.com/pytest-dev/pytest/issues/7620), [#​7426](https://togithub.com/pytest-dev/pytest/issues/7426). - [#​7572](https://togithub.com/pytest-dev/pytest/issues/7572): When a plugin listed in `required_plugins` is missing or an unknown config key is used with `--strict-config`, a simple error message is now shown instead of a stacktrace. - [#​7685](https://togithub.com/pytest-dev/pytest/issues/7685): Added two new attributes rootpath <\_pytest.config.Config.rootpath> and inipath <\_pytest.config.Config.inipath> to Config <\_pytest.config.Config>. These attributes are pathlib.Path versions of the existing rootdir <\_pytest.config.Config.rootdir> and inifile <\_pytest.config.Config.inifile> attributes, and should be preferred over them when possible. 
- [#​7780](https://togithub.com/pytest-dev/pytest/issues/7780): Public classes which are not designed to be inherited from are now marked [@​final](https://docs.python.org/3/library/typing.html#typing.final). Code which inherits from these classes will trigger a type-checking (e.g. mypy) error, but will still work in runtime. Currently the `final` designation does not appear in the API Reference but hopefully will in the future. ## Bug Fixes - [#​1953](https://togithub.com/pytest-dev/pytest/issues/1953): Fixed error when overwriting a parametrized fixture, while also reusing the super fixture value. ```{.sourceCode .python} ``` ### conftest.py import pytest @​pytest.fixture(params=[1, 2]) def foo(request): return request.param ### test_foo.py import pytest @​pytest.fixture def foo(foo): return foo * 2 ``` - [#​4984](https://togithub.com/pytest-dev/pytest/issues/4984): Fixed an internal error crash with `IndexError: list index out of range` when collecting a module which starts with a decorated function, the decorator raises, and assertion rewriting is enabled. - [#​7591](https://togithub.com/pytest-dev/pytest/issues/7591): pylint shouldn't complain anymore about unimplemented abstract methods when inheriting from File <non-python tests>. - [#​7628](https://togithub.com/pytest-dev/pytest/issues/7628): Fixed test collection when a full path without a drive letter was passed to pytest on Windows (for example `\projects\tests\test.py` instead of `c:\projects\tests\pytest.py`). 
- [#​7638](https://togithub.com/pytest-dev/pytest/issues/7638): Fix handling of command-line options that appear as paths but trigger an OS-level syntax error on Windows, such as the options used internally by `pytest-xdist`. - [#​7742](https://togithub.com/pytest-dev/pytest/issues/7742): Fixed INTERNALERROR when accessing locals / globals with faulty `exec`. ## Improved Documentation - [#​1477](https://togithub.com/pytest-dev/pytest/issues/1477): Removed faq.rst and its reference in contents.rst. ## Trivial/Internal Changes - [#​7536](https://togithub.com/pytest-dev/pytest/issues/7536): The internal `junitxml` plugin has been rewritten to use `xml.etree.ElementTree`. The order of attributes in XML elements might differ. Some unneeded escaping is no longer performed. - [#​7587](https://togithub.com/pytest-dev/pytest/issues/7587): The dependency on the `more-itertools` package has been removed. - [#​7631](https://togithub.com/pytest-dev/pytest/issues/7631): The result type of capfd.readouterr() <\_pytest.capture.CaptureFixture.readouterr> (and similar) is no longer a namedtuple, but should behave like one in all respects. This was done for technical reasons. - [#​7671](https://togithub.com/pytest-dev/pytest/issues/7671): When collecting tests, pytest finds test classes and functions by examining the attributes of python objects (modules, classes and instances). 
To speed up this process, pytest now ignores builtin attributes (like `__class__`, `__delattr__` and `__new__`) without consulting the python_classes and python_functions configuration options and without passing them to plugins using the pytest_pycollect_makeitem <\_pytest.hookspec.pytest_pycollect_makeitem> hook. ### [`v6.0.2`](https://togithub.com/pytest-dev/pytest/releases/6.0.2) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.0.1...6.0.2) # pytest 6.0.2 (2020-09-04) ## Bug Fixes - [#​7148](https://togithub.com/pytest-dev/pytest/issues/7148): Fixed `--log-cli` potentially causing unrelated `print` output to be swallowed. - [#​7672](https://togithub.com/pytest-dev/pytest/issues/7672): Fixed log-capturing level restored incorrectly if `caplog.set_level` is called more than once. - [#​7686](https://togithub.com/pytest-dev/pytest/issues/7686): Fixed NotSetType.token being used as the parameter ID when the parametrization list is empty. Regressed in pytest 6.0.0. - [#​7707](https://togithub.com/pytest-dev/pytest/issues/7707): Fix internal error when handling some exceptions that contain multiple lines or the style uses multiple lines (`--tb=line` for example).
--- ### Configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-datacatalog). --- samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 7e460c8c..f7e3ec09 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==6.0.1 +pytest==6.2.3 From 6772851d12a6432eaa662db468df233dbd3195f3 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 15 Apr 2021 14:11:33 -0700 Subject: [PATCH 07/17] ci: use secret manager for PyPI token (#140) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * docs(python): add empty lines between methods Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Apr 14 14:41:09 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 721339ab60a6eb63b889978b3d9b295dcb3be370 Source-Link: https://github.com/googleapis/synthtool/commit/721339ab60a6eb63b889978b3d9b295dcb3be370 * build: use PyPI API token in secret manager Migrate python libraries onto the PyPI API token stored in secret manager. 
A PyPI API token is limited in scope to uploading new releases. https://pypi.org/help/#apitoken Verified that this works with [build](https://fusion2.corp.google.com/invocations/14bae126-83fa-4328-8da9-d390ed99315c/targets/cloud-devrel%2Fclient-libraries%2Fpython%2Fgoogleapis%2Fpython-vision%2Frelease%2Frelease;config=default/log) on https://github.com/googleapis/python-vision/pull/136 Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Apr 14 17:46:06 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 043cc620d6a6111816d9e09f2a97208565fde958 Source-Link: https://github.com/googleapis/synthtool/commit/043cc620d6a6111816d9e09f2a97208565fde958 --- .kokoro/release.sh | 4 ++-- .kokoro/release/common.cfg | 14 ++------------ docs/_static/custom.css | 13 ++++++++++++- google/cloud/datacatalog_v1beta1/__init__.py | 4 ++-- synth.metadata | 6 +++--- 5 files changed, 21 insertions(+), 20 deletions(-) diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 5a5d271f..4b38dded 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") cd github/python-datacatalog python3 setup.py sdist bdist_wheel -twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 3156ce87..e131fd92 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,18 +23,8 @@ env_vars: { value: "github/python-datacatalog/.kokoro/release.sh" } -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} \ No newline at end of file + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" +} diff --git a/docs/_static/custom.css b/docs/_static/custom.css index bcd37bbd..b0a29546 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,9 +1,20 @@ div#python2-eol { border-color: red; border-width: medium; -} +} /* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index be0bdd8e..8bc01583 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -140,7 +140,7 @@ 
"ListTaxonomiesResponse", "LookupEntryRequest", "PolicyTag", - "PolicyTagManagerClient", + "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", "Schema", "SearchCatalogRequest", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerSerializationClient", + "PolicyTagManagerClient", ) diff --git a/synth.metadata b/synth.metadata index 8328c7d0..6fe87e5d 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-datacatalog.git", - "sha": "875c645ebfc01d1c25819a89486cec197d08c563" + "sha": "8c157542045cfdebe31876cfc2448590b48775c2" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0a071b3460344886297a304253bf924aa68ddb7e" + "sha": "043cc620d6a6111816d9e09f2a97208565fde958" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0a071b3460344886297a304253bf924aa68ddb7e" + "sha": "043cc620d6a6111816d9e09f2a97208565fde958" } } ], From 87826f7c09fd4af59419bb87160daee3dc3e166e Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Fri, 16 Apr 2021 17:24:29 -0400 Subject: [PATCH 08/17] chore: prevent normalization of semver versioning (#141) * chore: prevent normalization of semver versioning * chore: update workaround to make sic work --- setup.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d0af548c..b792fba8 100644 --- a/setup.py +++ b/setup.py @@ -19,6 +19,21 @@ import setuptools +# Disable version normalization performed by setuptools.setup() +try: + # Try the approach of using sic(), added in setuptools 46.1.0 + from setuptools import sic +except ImportError: + # 
Try the approach of replacing packaging.version.Version + sic = lambda v: v + try: + # setuptools >=39.0.0 uses packaging from setuptools.extern + from setuptools.extern import packaging + except ImportError: + # setuptools <39.0.0 uses packaging from pkg_resources.extern + from pkg_resources.extern import packaging + packaging.version.Version = packaging.version.LegacyVersion + name = "google-cloud-datacatalog" description = "Google Cloud Data Catalog API API client library" version = "3.1.1" @@ -52,7 +67,7 @@ setuptools.setup( name=name, - version=version, + version=sic(version), description=description, long_description=readme, author="Google LLC", From 00f417ae51873cf1d4609585242a9a38abdd9d9c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 27 Apr 2021 07:56:06 -0400 Subject: [PATCH 09/17] chore: migrate to owl bot (#145) --- .github/.OwlBot.lock.yaml | 4 + .github/.OwlBot.yaml | 26 +++ google/cloud/datacatalog_v1beta1/__init__.py | 4 +- synth.py => owlbot.py | 17 +- synth.metadata | 223 ------------------- 5 files changed, 36 insertions(+), 238 deletions(-) create mode 100644 .github/.OwlBot.lock.yaml create mode 100644 .github/.OwlBot.yaml rename synth.py => owlbot.py (76%) delete mode 100644 synth.metadata diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml new file mode 100644 index 00000000..29084e8a --- /dev/null +++ b/.github/.OwlBot.lock.yaml @@ -0,0 +1,4 @@ +docker: + digest: sha256:cfc0e802701262c211703c468874d767f65dabe6a1a71d0e07bfc8a3d5175f32 + image: gcr.io/repo-automation-bots/owlbot-python:latest + diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml new file mode 100644 index 00000000..f7f3a47f --- /dev/null +++ b/.github/.OwlBot.yaml @@ -0,0 +1,26 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +docker: + image: gcr.io/repo-automation-bots/owlbot-python:latest + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/datacatalog/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/$1/$2 + +begin-after-commit-hash: 7313e4e156a911389f02c2fef9413f6b5b22b196 + diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index 8bc01583..be0bdd8e 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -140,7 +140,7 @@ "ListTaxonomiesResponse", "LookupEntryRequest", "PolicyTag", - "PolicyTagManagerSerializationClient", + "PolicyTagManagerClient", "RenameTagTemplateFieldRequest", "Schema", "SearchCatalogRequest", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerClient", + "PolicyTagManagerSerializationClient", ) diff --git a/synth.py b/owlbot.py similarity index 76% rename from synth.py rename to owlbot.py index ed4e1f6b..9165e1b2 100644 --- a/synth.py +++ b/owlbot.py @@ -18,22 +18,11 @@ from synthtool import gcp from synthtool.languages import python -gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() +default_version = "v1beta1" -# ---------------------------------------------------------------------------- -# Generate datacatalog GAPIC layer -# ---------------------------------------------------------------------------- -versions = ['v1', 'v1beta1'] -for version in versions: - library = gapic.py_library( - 
service='datacatalog', - version=version, - bazel_target=f"//google/cloud/datacatalog/{version}:datacatalog-{version}-py", - include_protos=True, - ) - +for library in s.get_staging_dirs(default_version): s.move( library, excludes=[ @@ -46,6 +35,8 @@ ], ) +s.remove_staging_dirs() + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- diff --git a/synth.metadata b/synth.metadata deleted file mode 100644 index 6fe87e5d..00000000 --- a/synth.metadata +++ /dev/null @@ -1,223 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-datacatalog.git", - "sha": "8c157542045cfdebe31876cfc2448590b48775c2" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "15c5e21948ff6fbe41f91bdf04f6252f91a12d59", - "internalRef": "364894175" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "043cc620d6a6111816d9e09f2a97208565fde958" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "043cc620d6a6111816d9e09f2a97208565fde958" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "datacatalog", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - }, - { - "client": { - "source": "googleapis", - "apiName": "datacatalog", - "apiVersion": "v1beta1", - "language": "python", - "generator": "bazel" - } - } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - 
".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/header-checker-lint.yml", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic-head.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic-head.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic-head.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples-against-head.sh", - ".kokoro/test-samples-impl.sh", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "docs/datacatalog_v1/data_catalog.rst", - 
"docs/datacatalog_v1/services.rst", - "docs/datacatalog_v1/types.rst", - "docs/datacatalog_v1beta1/data_catalog.rst", - "docs/datacatalog_v1beta1/policy_tag_manager.rst", - "docs/datacatalog_v1beta1/policy_tag_manager_serialization.rst", - "docs/datacatalog_v1beta1/services.rst", - "docs/datacatalog_v1beta1/types.rst", - "docs/multiprocessing.rst", - "google/cloud/datacatalog/__init__.py", - "google/cloud/datacatalog/py.typed", - "google/cloud/datacatalog_v1/__init__.py", - "google/cloud/datacatalog_v1/proto/common.proto", - "google/cloud/datacatalog_v1/proto/datacatalog.proto", - "google/cloud/datacatalog_v1/proto/gcs_fileset_spec.proto", - "google/cloud/datacatalog_v1/proto/schema.proto", - "google/cloud/datacatalog_v1/proto/search.proto", - "google/cloud/datacatalog_v1/proto/table_spec.proto", - "google/cloud/datacatalog_v1/proto/tags.proto", - "google/cloud/datacatalog_v1/proto/timestamps.proto", - "google/cloud/datacatalog_v1/py.typed", - "google/cloud/datacatalog_v1/services/__init__.py", - "google/cloud/datacatalog_v1/services/data_catalog/__init__.py", - "google/cloud/datacatalog_v1/services/data_catalog/async_client.py", - "google/cloud/datacatalog_v1/services/data_catalog/client.py", - "google/cloud/datacatalog_v1/services/data_catalog/pagers.py", - "google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py", - "google/cloud/datacatalog_v1/services/data_catalog/transports/base.py", - "google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py", - "google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py", - "google/cloud/datacatalog_v1/types/__init__.py", - "google/cloud/datacatalog_v1/types/common.py", - "google/cloud/datacatalog_v1/types/datacatalog.py", - "google/cloud/datacatalog_v1/types/gcs_fileset_spec.py", - "google/cloud/datacatalog_v1/types/schema.py", - "google/cloud/datacatalog_v1/types/search.py", - "google/cloud/datacatalog_v1/types/table_spec.py", - "google/cloud/datacatalog_v1/types/tags.py", - 
"google/cloud/datacatalog_v1/types/timestamps.py", - "google/cloud/datacatalog_v1beta1/__init__.py", - "google/cloud/datacatalog_v1beta1/proto/common.proto", - "google/cloud/datacatalog_v1beta1/proto/datacatalog.proto", - "google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto", - "google/cloud/datacatalog_v1beta1/proto/policytagmanager.proto", - "google/cloud/datacatalog_v1beta1/proto/policytagmanagerserialization.proto", - "google/cloud/datacatalog_v1beta1/proto/schema.proto", - "google/cloud/datacatalog_v1beta1/proto/search.proto", - "google/cloud/datacatalog_v1beta1/proto/table_spec.proto", - "google/cloud/datacatalog_v1beta1/proto/tags.proto", - "google/cloud/datacatalog_v1beta1/proto/timestamps.proto", - "google/cloud/datacatalog_v1beta1/py.typed", - "google/cloud/datacatalog_v1beta1/services/__init__.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/client.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py", - 
"google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py", - "google/cloud/datacatalog_v1beta1/types/__init__.py", - "google/cloud/datacatalog_v1beta1/types/common.py", - "google/cloud/datacatalog_v1beta1/types/datacatalog.py", - "google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py", - "google/cloud/datacatalog_v1beta1/types/policytagmanager.py", - "google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py", - "google/cloud/datacatalog_v1beta1/types/schema.py", - "google/cloud/datacatalog_v1beta1/types/search.py", - "google/cloud/datacatalog_v1beta1/types/table_spec.py", - "google/cloud/datacatalog_v1beta1/types/tags.py", - "google/cloud/datacatalog_v1beta1/types/timestamps.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/noxfile.py", - "scripts/decrypt-secrets.sh", - "scripts/fixup_datacatalog_v1_keywords.py", - "scripts/fixup_datacatalog_v1beta1_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - 
"scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/datacatalog_v1/__init__.py", - "tests/unit/gapic/datacatalog_v1/test_data_catalog.py", - "tests/unit/gapic/datacatalog_v1beta1/__init__.py", - "tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py", - "tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py", - "tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py" - ] -} \ No newline at end of file From d79c509890f799ca2f180914815bdb4d0c4b95fe Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Wed, 28 Apr 2021 11:42:06 -0400 Subject: [PATCH 10/17] chore(revert): revert preventing normalization (#144) reverts previous commit for preventing normalization of versioning --- setup.py | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/setup.py b/setup.py index b792fba8..d0af548c 100644 --- a/setup.py +++ b/setup.py @@ -19,21 +19,6 @@ import setuptools -# Disable version normalization performed by setuptools.setup() -try: - # Try the approach of using sic(), added in setuptools 46.1.0 - from setuptools import sic -except ImportError: - # Try the approach of replacing packaging.version.Version - sic = lambda v: v - try: - # setuptools >=39.0.0 uses packaging from setuptools.extern - from setuptools.extern import packaging - except ImportError: - # setuptools <39.0.0 uses packaging from pkg_resources.extern - from pkg_resources.extern import packaging - packaging.version.Version = packaging.version.LegacyVersion - name = "google-cloud-datacatalog" description = "Google Cloud Data Catalog API API client library" version = "3.1.1" @@ -67,7 +52,7 @@ setuptools.setup( name=name, - version=sic(version), + version=version, description=description, long_description=readme, author="Google LLC", From bb2765dc219580d91ff82611971631b1a62bd41e Mon Sep 17 
00:00:00 2001 From: WhiteSource Renovate Date: Thu, 6 May 2021 20:15:27 +0200 Subject: [PATCH 11/17] chore(deps): update dependency pytest to v6.2.4 (#152) --- samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index f7e3ec09..95ea1e6a 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==6.2.3 +pytest==6.2.4 From 062ec8c6cb797b3678983ef6c42e13ae39859dfb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 10 May 2021 10:59:28 -0700 Subject: [PATCH 12/17] chore: new owl bot post processor docker image (#155) gcr.io/repo-automation-bots/owlbot-python:latest@sha256:1f18bfeb1629a6fd4c9301d4174c672cae5ac7ba611a5c8d204d6010e61f6f0d Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 5 ++--- .pre-commit-config.yaml | 2 +- CONTRIBUTING.rst | 16 +--------------- noxfile.py | 14 ++------------ 4 files changed, 6 insertions(+), 31 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 29084e8a..bab398e1 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,3 @@ docker: - digest: sha256:cfc0e802701262c211703c468874d767f65dabe6a1a71d0e07bfc8a3d5175f32 - image: gcr.io/repo-automation-bots/owlbot-python:latest - + image: gcr.io/repo-automation-bots/owlbot-python:latest + digest: sha256:1f18bfeb1629a6fd4c9301d4174c672cae5ac7ba611a5c8d204d6010e61f6f0d diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8912e9b5..1bbd7878 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.0 + rev: 3.9.1 hooks: - id: flake8 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 631f94f9..e647c0c8 100644 --- 
a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -160,21 +160,7 @@ Running System Tests auth settings and change some configuration in your project to run all the tests. -- System tests will be run against an actual project and - so you'll need to provide some environment variables to facilitate - authentication to your project: - - - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; - Such a file can be downloaded directly from the developer's console by clicking - "Generate new JSON key". See private key - `docs `__ - for more details. - -- Once you have downloaded your json keys, set the environment variable - ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: - - $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" - +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. ************* Test Coverage diff --git a/noxfile.py b/noxfile.py index 0eb433e2..04c230d2 100644 --- a/noxfile.py +++ b/noxfile.py @@ -62,16 +62,9 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( "black", *BLACK_PATHS, @@ -131,9 +124,6 @@ def system(session): # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. 
if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") # Install pyopenssl for mTLS testing. if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": session.install("pyopenssl") From 5f6cefb287e0005cdaf12b54ca1f623d144031aa Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 May 2021 14:00:11 -0400 Subject: [PATCH 13/17] chore: add library type to .repo-metadata.json (#154) --- .repo-metadata.json | 1 + 1 file changed, 1 insertion(+) diff --git a/.repo-metadata.json b/.repo-metadata.json index 11cc1c31..5b6e3c6b 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -6,6 +6,7 @@ "issue_tracker": "", "release_level": "ga", "language": "python", + "library_type": "GAPIC_AUTO", "repo": "googleapis/python-datacatalog", "distribution_name": "google-cloud-datacatalog", "api_id": "datacatalog.googleapis.com", From 2769a1995251fea0ae50c5536c3f4696013ed11b Mon Sep 17 00:00:00 2001 From: "google-cloud-policy-bot[bot]" <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> Date: Mon, 10 May 2021 11:41:21 -0700 Subject: [PATCH 14/17] chore: add SECURITY.md (#148) Co-authored-by: google-cloud-policy-bot[bot] <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> --- SECURITY.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000..8b58ae9c --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. 
+ +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. From 028e14f336f96b3ea487034b65ef0b1bc57fa94e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 16 May 2021 11:20:02 +0000 Subject: [PATCH 15/17] chore: new owl bot post processor docker image (#160) gcr.io/repo-automation-bots/owlbot-python:latest@sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa --- .github/.OwlBot.lock.yaml | 2 +- .pre-commit-config.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index bab398e1..864c1765 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:1f18bfeb1629a6fd4c9301d4174c672cae5ac7ba611a5c8d204d6010e61f6f0d + digest: sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1bbd7878..4f00c7cf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.1 + rev: 3.9.2 hooks: - id: flake8 From 85e46e144d32a0d66bc2d7c056453951eb77d592 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 18 May 2021 02:22:05 +0000 Subject: [PATCH 16/17] chore: upgrade gapic-generator-python to 0.46.3 (#159) PiperOrigin-RevId: 373649163 Source-Link: https://github.com/googleapis/googleapis/commit/7e1b14e6c7a9ab96d2db7e4a131981f162446d34 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0a3c7d272d697796db75857bac73905c68e498c3 fix: add 
async client to %name_%version/init.py chore: add autogenerated snippets chore: remove auth, policy, and options from the reserved names list feat: support self-signed JWT flow for service accounts chore: enable GAPIC metadata generation chore: sort subpackages in %namespace/%name/init.py --- docs/datacatalog_v1/data_catalog.rst | 1 - docs/datacatalog_v1/policy_tag_manager.rst | 10 + .../policy_tag_manager_serialization.rst | 6 + docs/datacatalog_v1/services.rst | 2 + docs/datacatalog_v1beta1/data_catalog.rst | 1 - .../policy_tag_manager.rst | 1 - google/cloud/datacatalog/__init__.py | 110 +- google/cloud/datacatalog_v1/__init__.py | 75 +- .../cloud/datacatalog_v1/gapic_metadata.json | 481 ++ .../cloud/datacatalog_v1/services/__init__.py | 1 - .../services/data_catalog/__init__.py | 2 - .../services/data_catalog/async_client.py | 391 +- .../services/data_catalog/client.py | 459 +- .../services/data_catalog/pagers.py | 10 +- .../data_catalog/transports/__init__.py | 2 - .../services/data_catalog/transports/base.py | 279 +- .../services/data_catalog/transports/grpc.py | 107 +- .../data_catalog/transports/grpc_asyncio.py | 110 +- .../services/policy_tag_manager/__init__.py | 22 + .../policy_tag_manager/async_client.py | 1246 +++++ .../services/policy_tag_manager/client.py | 1447 ++++++ .../services/policy_tag_manager/pagers.py | 283 ++ .../policy_tag_manager/transports/__init__.py | 33 + .../policy_tag_manager/transports/base.py | 342 ++ .../policy_tag_manager/transports/grpc.py | 600 +++ .../transports/grpc_asyncio.py | 613 +++ .../__init__.py | 22 + .../async_client.py | 301 ++ .../client.py | 489 ++ .../transports/__init__.py | 35 + .../transports/base.py | 202 + .../transports/grpc.py | 303 ++ .../transports/grpc_asyncio.py | 306 ++ google/cloud/datacatalog_v1/types/__init__.py | 56 +- google/cloud/datacatalog_v1/types/common.py | 3 +- .../cloud/datacatalog_v1/types/data_source.py | 45 + .../cloud/datacatalog_v1/types/datacatalog.py | 517 +- 
.../datacatalog_v1/types/gcs_fileset_spec.py | 14 +- .../datacatalog_v1/types/policytagmanager.py | 390 ++ .../types/policytagmanagerserialization.py | 196 + google/cloud/datacatalog_v1/types/schema.py | 36 +- google/cloud/datacatalog_v1/types/search.py | 31 +- .../cloud/datacatalog_v1/types/table_spec.py | 20 +- google/cloud/datacatalog_v1/types/tags.py | 149 +- .../cloud/datacatalog_v1/types/timestamps.py | 13 +- google/cloud/datacatalog_v1beta1/__init__.py | 19 +- .../datacatalog_v1beta1/gapic_metadata.json | 471 ++ .../datacatalog_v1beta1/services/__init__.py | 1 - .../services/data_catalog/__init__.py | 2 - .../services/data_catalog/async_client.py | 148 +- .../services/data_catalog/client.py | 196 +- .../services/data_catalog/pagers.py | 10 +- .../data_catalog/transports/__init__.py | 2 - .../services/data_catalog/transports/base.py | 263 +- .../services/data_catalog/transports/grpc.py | 69 +- .../data_catalog/transports/grpc_asyncio.py | 71 +- .../services/policy_tag_manager/__init__.py | 2 - .../policy_tag_manager/async_client.py | 72 +- .../services/policy_tag_manager/client.py | 129 +- .../services/policy_tag_manager/pagers.py | 6 +- .../policy_tag_manager/transports/__init__.py | 2 - .../policy_tag_manager/transports/base.py | 182 +- .../policy_tag_manager/transports/grpc.py | 55 +- .../transports/grpc_asyncio.py | 59 +- .../__init__.py | 2 - .../async_client.py | 28 +- .../client.py | 65 +- .../transports/__init__.py | 2 - .../transports/base.py | 110 +- .../transports/grpc.py | 22 +- .../transports/grpc_asyncio.py | 23 +- .../datacatalog_v1beta1/types/__init__.py | 2 - .../cloud/datacatalog_v1beta1/types/common.py | 2 - .../datacatalog_v1beta1/types/datacatalog.py | 190 +- .../types/gcs_fileset_spec.py | 14 +- .../types/policytagmanager.py | 74 +- .../types/policytagmanagerserialization.py | 27 +- .../cloud/datacatalog_v1beta1/types/schema.py | 15 +- .../cloud/datacatalog_v1beta1/types/search.py | 11 +- .../datacatalog_v1beta1/types/table_spec.py | 19 
+- .../cloud/datacatalog_v1beta1/types/tags.py | 63 +- .../datacatalog_v1beta1/types/timestamps.py | 13 +- scripts/fixup_datacatalog_v1_keywords.py | 72 +- scripts/fixup_datacatalog_v1beta1_keywords.py | 83 +- testing/constraints-3.6.txt | 3 +- tests/__init__.py | 15 + tests/unit/__init__.py | 15 + tests/unit/gapic/__init__.py | 15 + tests/unit/gapic/datacatalog_v1/__init__.py | 1 - .../gapic/datacatalog_v1/test_data_catalog.py | 1821 +++---- .../datacatalog_v1/test_policy_tag_manager.py | 4213 +++++++++++++++++ .../test_policy_tag_manager_serialization.py | 1493 ++++++ .../gapic/datacatalog_v1beta1/__init__.py | 1 - .../datacatalog_v1beta1/test_data_catalog.py | 1471 +++--- .../test_policy_tag_manager.py | 860 ++-- .../test_policy_tag_manager_serialization.py | 338 +- 96 files changed, 18301 insertions(+), 4303 deletions(-) create mode 100644 docs/datacatalog_v1/policy_tag_manager.rst create mode 100644 docs/datacatalog_v1/policy_tag_manager_serialization.rst create mode 100644 google/cloud/datacatalog_v1/gapic_metadata.json create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/client.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py create mode 
100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py create mode 100644 google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py create mode 100644 google/cloud/datacatalog_v1/types/data_source.py create mode 100644 google/cloud/datacatalog_v1/types/policytagmanager.py create mode 100644 google/cloud/datacatalog_v1/types/policytagmanagerserialization.py create mode 100644 google/cloud/datacatalog_v1beta1/gapic_metadata.json create mode 100644 tests/__init__.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/gapic/__init__.py create mode 100644 tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py create mode 100644 tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py diff --git a/docs/datacatalog_v1/data_catalog.rst b/docs/datacatalog_v1/data_catalog.rst index 1f955a11..6141a1c7 100644 --- a/docs/datacatalog_v1/data_catalog.rst +++ b/docs/datacatalog_v1/data_catalog.rst @@ -5,7 +5,6 @@ DataCatalog :members: :inherited-members: - .. automodule:: google.cloud.datacatalog_v1.services.data_catalog.pagers :members: :inherited-members: diff --git a/docs/datacatalog_v1/policy_tag_manager.rst b/docs/datacatalog_v1/policy_tag_manager.rst new file mode 100644 index 00000000..03d2846f --- /dev/null +++ b/docs/datacatalog_v1/policy_tag_manager.rst @@ -0,0 +1,10 @@ +PolicyTagManager +---------------------------------- + +.. automodule:: google.cloud.datacatalog_v1.services.policy_tag_manager + :members: + :inherited-members: + +.. 
automodule:: google.cloud.datacatalog_v1.services.policy_tag_manager.pagers + :members: + :inherited-members: diff --git a/docs/datacatalog_v1/policy_tag_manager_serialization.rst b/docs/datacatalog_v1/policy_tag_manager_serialization.rst new file mode 100644 index 00000000..f7006d1b --- /dev/null +++ b/docs/datacatalog_v1/policy_tag_manager_serialization.rst @@ -0,0 +1,6 @@ +PolicyTagManagerSerialization +----------------------------------------------- + +.. automodule:: google.cloud.datacatalog_v1.services.policy_tag_manager_serialization + :members: + :inherited-members: diff --git a/docs/datacatalog_v1/services.rst b/docs/datacatalog_v1/services.rst index fd21338e..a70d3132 100644 --- a/docs/datacatalog_v1/services.rst +++ b/docs/datacatalog_v1/services.rst @@ -4,3 +4,5 @@ Services for Google Cloud Datacatalog v1 API :maxdepth: 2 data_catalog + policy_tag_manager + policy_tag_manager_serialization diff --git a/docs/datacatalog_v1beta1/data_catalog.rst b/docs/datacatalog_v1beta1/data_catalog.rst index e3b0675d..82ca26f3 100644 --- a/docs/datacatalog_v1beta1/data_catalog.rst +++ b/docs/datacatalog_v1beta1/data_catalog.rst @@ -5,7 +5,6 @@ DataCatalog :members: :inherited-members: - .. automodule:: google.cloud.datacatalog_v1beta1.services.data_catalog.pagers :members: :inherited-members: diff --git a/docs/datacatalog_v1beta1/policy_tag_manager.rst b/docs/datacatalog_v1beta1/policy_tag_manager.rst index 01a7cf6b..8971945c 100644 --- a/docs/datacatalog_v1beta1/policy_tag_manager.rst +++ b/docs/datacatalog_v1beta1/policy_tag_manager.rst @@ -5,7 +5,6 @@ PolicyTagManager :members: :inherited-members: - .. 
automodule:: google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers :members: :inherited-members: diff --git a/google/cloud/datacatalog/__init__.py b/google/cloud/datacatalog/__init__.py index c33e190b..99800a10 100644 --- a/google/cloud/datacatalog/__init__.py +++ b/google/cloud/datacatalog/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,24 +14,25 @@ # limitations under the License. # -from google.cloud.datacatalog_v1beta1.services.data_catalog.async_client import ( - DataCatalogAsyncClient, -) from google.cloud.datacatalog_v1beta1.services.data_catalog.client import ( DataCatalogClient, ) -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager.async_client import ( - PolicyTagManagerAsyncClient, +from google.cloud.datacatalog_v1beta1.services.data_catalog.async_client import ( + DataCatalogAsyncClient, ) from google.cloud.datacatalog_v1beta1.services.policy_tag_manager.client import ( PolicyTagManagerClient, ) -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.async_client import ( - PolicyTagManagerSerializationAsyncClient, +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager.async_client import ( + PolicyTagManagerAsyncClient, ) from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.client import ( PolicyTagManagerSerializationClient, ) +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.async_client import ( + PolicyTagManagerSerializationAsyncClient, +) + from google.cloud.datacatalog_v1beta1.types.common import IntegratedSystem from google.cloud.datacatalog_v1beta1.types.datacatalog import CreateEntryGroupRequest from google.cloud.datacatalog_v1beta1.types.datacatalog import CreateEntryRequest @@ -50,7 +50,6 @@ from google.cloud.datacatalog_v1beta1.types.datacatalog import DeleteTagTemplateRequest from 
google.cloud.datacatalog_v1beta1.types.datacatalog import Entry from google.cloud.datacatalog_v1beta1.types.datacatalog import EntryGroup -from google.cloud.datacatalog_v1beta1.types.datacatalog import EntryType from google.cloud.datacatalog_v1beta1.types.datacatalog import GetEntryGroupRequest from google.cloud.datacatalog_v1beta1.types.datacatalog import GetEntryRequest from google.cloud.datacatalog_v1beta1.types.datacatalog import GetTagTemplateRequest @@ -73,8 +72,9 @@ UpdateTagTemplateFieldRequest, ) from google.cloud.datacatalog_v1beta1.types.datacatalog import UpdateTagTemplateRequest -from google.cloud.datacatalog_v1beta1.types.gcs_fileset_spec import GcsFileSpec +from google.cloud.datacatalog_v1beta1.types.datacatalog import EntryType from google.cloud.datacatalog_v1beta1.types.gcs_fileset_spec import GcsFilesetSpec +from google.cloud.datacatalog_v1beta1.types.gcs_fileset_spec import GcsFileSpec from google.cloud.datacatalog_v1beta1.types.policytagmanager import ( CreatePolicyTagRequest, ) @@ -136,9 +136,9 @@ from google.cloud.datacatalog_v1beta1.types.search import SearchResultType from google.cloud.datacatalog_v1beta1.types.table_spec import BigQueryDateShardedSpec from google.cloud.datacatalog_v1beta1.types.table_spec import BigQueryTableSpec -from google.cloud.datacatalog_v1beta1.types.table_spec import TableSourceType from google.cloud.datacatalog_v1beta1.types.table_spec import TableSpec from google.cloud.datacatalog_v1beta1.types.table_spec import ViewSpec +from google.cloud.datacatalog_v1beta1.types.table_spec import TableSourceType from google.cloud.datacatalog_v1beta1.types.tags import FieldType from google.cloud.datacatalog_v1beta1.types.tags import Tag from google.cloud.datacatalog_v1beta1.types.tags import TagField @@ -147,80 +147,80 @@ from google.cloud.datacatalog_v1beta1.types.timestamps import SystemTimestamps __all__ = ( - "BigQueryDateShardedSpec", - "BigQueryTableSpec", - "ColumnSchema", + "DataCatalogClient", + 
"DataCatalogAsyncClient", + "PolicyTagManagerClient", + "PolicyTagManagerAsyncClient", + "PolicyTagManagerSerializationClient", + "PolicyTagManagerSerializationAsyncClient", + "IntegratedSystem", "CreateEntryGroupRequest", "CreateEntryRequest", - "CreatePolicyTagRequest", "CreateTagRequest", "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", - "CreateTaxonomyRequest", - "DataCatalogAsyncClient", - "DataCatalogClient", "DeleteEntryGroupRequest", "DeleteEntryRequest", - "DeletePolicyTagRequest", "DeleteTagRequest", "DeleteTagTemplateFieldRequest", "DeleteTagTemplateRequest", - "DeleteTaxonomyRequest", "Entry", "EntryGroup", - "EntryType", - "ExportTaxonomiesRequest", - "ExportTaxonomiesResponse", - "FieldType", - "GcsFileSpec", - "GcsFilesetSpec", "GetEntryGroupRequest", "GetEntryRequest", - "GetPolicyTagRequest", "GetTagTemplateRequest", - "GetTaxonomyRequest", - "ImportTaxonomiesRequest", - "ImportTaxonomiesResponse", - "InlineSource", - "IntegratedSystem", "ListEntriesRequest", "ListEntriesResponse", "ListEntryGroupsRequest", "ListEntryGroupsResponse", - "ListPolicyTagsRequest", - "ListPolicyTagsResponse", "ListTagsRequest", "ListTagsResponse", - "ListTaxonomiesRequest", - "ListTaxonomiesResponse", "LookupEntryRequest", - "PolicyTag", - "PolicyTagManagerAsyncClient", - "PolicyTagManagerClient", - "PolicyTagManagerSerializationAsyncClient", - "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", - "Schema", "SearchCatalogRequest", "SearchCatalogResponse", - "SearchCatalogResult", - "SearchResultType", - "SerializedPolicyTag", - "SerializedTaxonomy", - "SystemTimestamps", - "TableSourceType", - "TableSpec", - "Tag", - "TagField", - "TagTemplate", - "TagTemplateField", - "Taxonomy", "UpdateEntryGroupRequest", "UpdateEntryRequest", - "UpdatePolicyTagRequest", "UpdateTagRequest", "UpdateTagTemplateFieldRequest", "UpdateTagTemplateRequest", + "EntryType", + "GcsFilesetSpec", + "GcsFileSpec", + "CreatePolicyTagRequest", + 
"CreateTaxonomyRequest", + "DeletePolicyTagRequest", + "DeleteTaxonomyRequest", + "GetPolicyTagRequest", + "GetTaxonomyRequest", + "ListPolicyTagsRequest", + "ListPolicyTagsResponse", + "ListTaxonomiesRequest", + "ListTaxonomiesResponse", + "PolicyTag", + "Taxonomy", + "UpdatePolicyTagRequest", "UpdateTaxonomyRequest", + "ExportTaxonomiesRequest", + "ExportTaxonomiesResponse", + "ImportTaxonomiesRequest", + "ImportTaxonomiesResponse", + "InlineSource", + "SerializedPolicyTag", + "SerializedTaxonomy", + "ColumnSchema", + "Schema", + "SearchCatalogResult", + "SearchResultType", + "BigQueryDateShardedSpec", + "BigQueryTableSpec", + "TableSpec", "ViewSpec", + "TableSourceType", + "FieldType", + "Tag", + "TagField", + "TagTemplate", + "TagTemplateField", + "SystemTimestamps", ) diff --git a/google/cloud/datacatalog_v1/__init__.py b/google/cloud/datacatalog_v1/__init__.py index 734df087..c793267f 100644 --- a/google/cloud/datacatalog_v1/__init__.py +++ b/google/cloud/datacatalog_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,12 +15,24 @@ # from .services.data_catalog import DataCatalogClient +from .services.data_catalog import DataCatalogAsyncClient +from .services.policy_tag_manager import PolicyTagManagerClient +from .services.policy_tag_manager import PolicyTagManagerAsyncClient +from .services.policy_tag_manager_serialization import ( + PolicyTagManagerSerializationClient, +) +from .services.policy_tag_manager_serialization import ( + PolicyTagManagerSerializationAsyncClient, +) + from .types.common import IntegratedSystem +from .types.data_source import DataSource from .types.datacatalog import CreateEntryGroupRequest from .types.datacatalog import CreateEntryRequest from .types.datacatalog import CreateTagRequest from .types.datacatalog import CreateTagTemplateFieldRequest from .types.datacatalog import CreateTagTemplateRequest +from .types.datacatalog 
import DatabaseTableSpec from .types.datacatalog import DeleteEntryGroupRequest from .types.datacatalog import DeleteEntryRequest from .types.datacatalog import DeleteTagRequest @@ -29,7 +40,6 @@ from .types.datacatalog import DeleteTagTemplateRequest from .types.datacatalog import Entry from .types.datacatalog import EntryGroup -from .types.datacatalog import EntryType from .types.datacatalog import GetEntryGroupRequest from .types.datacatalog import GetEntryRequest from .types.datacatalog import GetTagTemplateRequest @@ -40,6 +50,7 @@ from .types.datacatalog import ListTagsRequest from .types.datacatalog import ListTagsResponse from .types.datacatalog import LookupEntryRequest +from .types.datacatalog import RenameTagTemplateFieldEnumValueRequest from .types.datacatalog import RenameTagTemplateFieldRequest from .types.datacatalog import SearchCatalogRequest from .types.datacatalog import SearchCatalogResponse @@ -48,17 +59,40 @@ from .types.datacatalog import UpdateTagRequest from .types.datacatalog import UpdateTagTemplateFieldRequest from .types.datacatalog import UpdateTagTemplateRequest -from .types.gcs_fileset_spec import GcsFileSpec +from .types.datacatalog import EntryType from .types.gcs_fileset_spec import GcsFilesetSpec +from .types.gcs_fileset_spec import GcsFileSpec +from .types.policytagmanager import CreatePolicyTagRequest +from .types.policytagmanager import CreateTaxonomyRequest +from .types.policytagmanager import DeletePolicyTagRequest +from .types.policytagmanager import DeleteTaxonomyRequest +from .types.policytagmanager import GetPolicyTagRequest +from .types.policytagmanager import GetTaxonomyRequest +from .types.policytagmanager import ListPolicyTagsRequest +from .types.policytagmanager import ListPolicyTagsResponse +from .types.policytagmanager import ListTaxonomiesRequest +from .types.policytagmanager import ListTaxonomiesResponse +from .types.policytagmanager import PolicyTag +from .types.policytagmanager import Taxonomy +from 
.types.policytagmanager import UpdatePolicyTagRequest +from .types.policytagmanager import UpdateTaxonomyRequest +from .types.policytagmanagerserialization import CrossRegionalSource +from .types.policytagmanagerserialization import ExportTaxonomiesRequest +from .types.policytagmanagerserialization import ExportTaxonomiesResponse +from .types.policytagmanagerserialization import ImportTaxonomiesRequest +from .types.policytagmanagerserialization import ImportTaxonomiesResponse +from .types.policytagmanagerserialization import InlineSource +from .types.policytagmanagerserialization import SerializedPolicyTag +from .types.policytagmanagerserialization import SerializedTaxonomy from .types.schema import ColumnSchema from .types.schema import Schema from .types.search import SearchCatalogResult from .types.search import SearchResultType from .types.table_spec import BigQueryDateShardedSpec from .types.table_spec import BigQueryTableSpec -from .types.table_spec import TableSourceType from .types.table_spec import TableSpec from .types.table_spec import ViewSpec +from .types.table_spec import TableSourceType from .types.tags import FieldType from .types.tags import Tag from .types.tags import TagField @@ -66,44 +100,71 @@ from .types.tags import TagTemplateField from .types.timestamps import SystemTimestamps - __all__ = ( + "DataCatalogAsyncClient", + "PolicyTagManagerAsyncClient", + "PolicyTagManagerSerializationAsyncClient", "BigQueryDateShardedSpec", "BigQueryTableSpec", "ColumnSchema", "CreateEntryGroupRequest", "CreateEntryRequest", + "CreatePolicyTagRequest", "CreateTagRequest", "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", + "CreateTaxonomyRequest", + "CrossRegionalSource", + "DataCatalogClient", + "DataSource", + "DatabaseTableSpec", "DeleteEntryGroupRequest", "DeleteEntryRequest", + "DeletePolicyTagRequest", "DeleteTagRequest", "DeleteTagTemplateFieldRequest", "DeleteTagTemplateRequest", + "DeleteTaxonomyRequest", "Entry", "EntryGroup", 
"EntryType", + "ExportTaxonomiesRequest", + "ExportTaxonomiesResponse", "FieldType", "GcsFileSpec", "GcsFilesetSpec", "GetEntryGroupRequest", "GetEntryRequest", + "GetPolicyTagRequest", "GetTagTemplateRequest", + "GetTaxonomyRequest", + "ImportTaxonomiesRequest", + "ImportTaxonomiesResponse", + "InlineSource", "IntegratedSystem", "ListEntriesRequest", "ListEntriesResponse", "ListEntryGroupsRequest", "ListEntryGroupsResponse", + "ListPolicyTagsRequest", + "ListPolicyTagsResponse", "ListTagsRequest", "ListTagsResponse", + "ListTaxonomiesRequest", + "ListTaxonomiesResponse", "LookupEntryRequest", + "PolicyTag", + "PolicyTagManagerClient", + "PolicyTagManagerSerializationClient", + "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", "Schema", "SearchCatalogRequest", "SearchCatalogResponse", "SearchCatalogResult", "SearchResultType", + "SerializedPolicyTag", + "SerializedTaxonomy", "SystemTimestamps", "TableSourceType", "TableSpec", @@ -111,11 +172,13 @@ "TagField", "TagTemplate", "TagTemplateField", + "Taxonomy", "UpdateEntryGroupRequest", "UpdateEntryRequest", + "UpdatePolicyTagRequest", "UpdateTagRequest", "UpdateTagTemplateFieldRequest", "UpdateTagTemplateRequest", + "UpdateTaxonomyRequest", "ViewSpec", - "DataCatalogClient", ) diff --git a/google/cloud/datacatalog_v1/gapic_metadata.json b/google/cloud/datacatalog_v1/gapic_metadata.json new file mode 100644 index 00000000..fed22e35 --- /dev/null +++ b/google/cloud/datacatalog_v1/gapic_metadata.json @@ -0,0 +1,481 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.datacatalog_v1", + "protoPackage": "google.cloud.datacatalog.v1", + "schema": "1.0", + "services": { + "DataCatalog": { + "clients": { + "grpc": { + "libraryClient": "DataCatalogClient", + "rpcs": { + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + 
}, + "CreateTag": { + "methods": [ + "create_tag" + ] + }, + "CreateTagTemplate": { + "methods": [ + "create_tag_template" + ] + }, + "CreateTagTemplateField": { + "methods": [ + "create_tag_template_field" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteTag": { + "methods": [ + "delete_tag" + ] + }, + "DeleteTagTemplate": { + "methods": [ + "delete_tag_template" + ] + }, + "DeleteTagTemplateField": { + "methods": [ + "delete_tag_template_field" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { + "methods": [ + "get_entry_group" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetTagTemplate": { + "methods": [ + "get_tag_template" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListTags": { + "methods": [ + "list_tags" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "RenameTagTemplateField": { + "methods": [ + "rename_tag_template_field" + ] + }, + "RenameTagTemplateFieldEnumValue": { + "methods": [ + "rename_tag_template_field_enum_value" + ] + }, + "SearchCatalog": { + "methods": [ + "search_catalog" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateTag": { + "methods": [ + "update_tag" + ] + }, + "UpdateTagTemplate": { + "methods": [ + "update_tag_template" + ] + }, + "UpdateTagTemplateField": { + "methods": [ + "update_tag_template_field" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataCatalogAsyncClient", + "rpcs": { + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + }, 
+ "CreateTag": { + "methods": [ + "create_tag" + ] + }, + "CreateTagTemplate": { + "methods": [ + "create_tag_template" + ] + }, + "CreateTagTemplateField": { + "methods": [ + "create_tag_template_field" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteTag": { + "methods": [ + "delete_tag" + ] + }, + "DeleteTagTemplate": { + "methods": [ + "delete_tag_template" + ] + }, + "DeleteTagTemplateField": { + "methods": [ + "delete_tag_template_field" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { + "methods": [ + "get_entry_group" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetTagTemplate": { + "methods": [ + "get_tag_template" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListTags": { + "methods": [ + "list_tags" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "RenameTagTemplateField": { + "methods": [ + "rename_tag_template_field" + ] + }, + "RenameTagTemplateFieldEnumValue": { + "methods": [ + "rename_tag_template_field_enum_value" + ] + }, + "SearchCatalog": { + "methods": [ + "search_catalog" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateTag": { + "methods": [ + "update_tag" + ] + }, + "UpdateTagTemplate": { + "methods": [ + "update_tag_template" + ] + }, + "UpdateTagTemplateField": { + "methods": [ + "update_tag_template_field" + ] + } + } + } + } + }, + "PolicyTagManager": { + "clients": { + "grpc": { + "libraryClient": "PolicyTagManagerClient", + "rpcs": { + "CreatePolicyTag": { + "methods": [ + "create_policy_tag" + ] + }, + "CreateTaxonomy": { + 
"methods": [ + "create_taxonomy" + ] + }, + "DeletePolicyTag": { + "methods": [ + "delete_policy_tag" + ] + }, + "DeleteTaxonomy": { + "methods": [ + "delete_taxonomy" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetPolicyTag": { + "methods": [ + "get_policy_tag" + ] + }, + "GetTaxonomy": { + "methods": [ + "get_taxonomy" + ] + }, + "ListPolicyTags": { + "methods": [ + "list_policy_tags" + ] + }, + "ListTaxonomies": { + "methods": [ + "list_taxonomies" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdatePolicyTag": { + "methods": [ + "update_policy_tag" + ] + }, + "UpdateTaxonomy": { + "methods": [ + "update_taxonomy" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PolicyTagManagerAsyncClient", + "rpcs": { + "CreatePolicyTag": { + "methods": [ + "create_policy_tag" + ] + }, + "CreateTaxonomy": { + "methods": [ + "create_taxonomy" + ] + }, + "DeletePolicyTag": { + "methods": [ + "delete_policy_tag" + ] + }, + "DeleteTaxonomy": { + "methods": [ + "delete_taxonomy" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetPolicyTag": { + "methods": [ + "get_policy_tag" + ] + }, + "GetTaxonomy": { + "methods": [ + "get_taxonomy" + ] + }, + "ListPolicyTags": { + "methods": [ + "list_policy_tags" + ] + }, + "ListTaxonomies": { + "methods": [ + "list_taxonomies" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdatePolicyTag": { + "methods": [ + "update_policy_tag" + ] + }, + "UpdateTaxonomy": { + "methods": [ + "update_taxonomy" + ] + } + } + } + } + }, + "PolicyTagManagerSerialization": { + "clients": { + "grpc": { + "libraryClient": "PolicyTagManagerSerializationClient", + "rpcs": { + "ExportTaxonomies": { + "methods": [ + "export_taxonomies" + ] + }, + "ImportTaxonomies": { + "methods": [ + "import_taxonomies" 
+ ] + } + } + }, + "grpc-async": { + "libraryClient": "PolicyTagManagerSerializationAsyncClient", + "rpcs": { + "ExportTaxonomies": { + "methods": [ + "export_taxonomies" + ] + }, + "ImportTaxonomies": { + "methods": [ + "import_taxonomies" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/datacatalog_v1/services/__init__.py b/google/cloud/datacatalog_v1/services/__init__.py index 42ffdf2b..4de65971 100644 --- a/google/cloud/datacatalog_v1/services/__init__.py +++ b/google/cloud/datacatalog_v1/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/datacatalog_v1/services/data_catalog/__init__.py b/google/cloud/datacatalog_v1/services/data_catalog/__init__.py index e56ed8a6..55cf6fbb 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/__init__.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import DataCatalogClient from .async_client import DataCatalogAsyncClient diff --git a/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/google/cloud/datacatalog_v1/services/data_catalog/async_client.py index 5964dba2..6eca9dcc 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/async_client.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,14 +20,15 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.datacatalog_v1.services.data_catalog import pagers from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import data_source from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import gcs_fileset_spec from google.cloud.datacatalog_v1.types import schema @@ -37,10 +36,9 @@ from google.cloud.datacatalog_v1.types import table_spec from google.cloud.datacatalog_v1.types import tags from google.cloud.datacatalog_v1.types import timestamps -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport from .client import DataCatalogClient @@ -68,27 +66,28 @@ class DataCatalogAsyncClient: parse_tag_template_field_path = staticmethod( DataCatalogClient.parse_tag_template_field_path ) - + tag_template_field_enum_value_path = staticmethod( + DataCatalogClient.tag_template_field_enum_value_path + ) + parse_tag_template_field_enum_value_path = 
staticmethod( + DataCatalogClient.parse_tag_template_field_enum_value_path + ) common_billing_account_path = staticmethod( DataCatalogClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( DataCatalogClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(DataCatalogClient.common_folder_path) parse_common_folder_path = staticmethod(DataCatalogClient.parse_common_folder_path) - common_organization_path = staticmethod(DataCatalogClient.common_organization_path) parse_common_organization_path = staticmethod( DataCatalogClient.parse_common_organization_path ) - common_project_path = staticmethod(DataCatalogClient.common_project_path) parse_common_project_path = staticmethod( DataCatalogClient.parse_common_project_path ) - common_location_path = staticmethod(DataCatalogClient.common_location_path) parse_common_location_path = staticmethod( DataCatalogClient.parse_common_location_path @@ -96,7 +95,8 @@ class DataCatalogAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -111,7 +111,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -128,7 +128,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DataCatalogTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: DataCatalogTransport: The transport used by the client instance. 
@@ -142,12 +142,12 @@ def transport(self) -> DataCatalogTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, DataCatalogTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the data catalog client. + """Instantiates the data catalog client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -179,7 +179,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = DataCatalogClient( credentials=credentials, transport=transport, @@ -203,7 +202,7 @@ async def search_catalog( This is a custom method (https://cloud.google.com/apis/design/custom_methods) and does not return the complete resource, only the resource identifier - and high level fields. Clients can subsequentally call ``Get`` + and high level fields. Clients can subsequently call ``Get`` methods. Note that Data Catalog search queries do not guarantee full @@ -230,8 +229,9 @@ async def search_catalog( on the ``request`` instance; if ``request`` is provided, this should not be set. query (:class:`str`): - Required. The query string in search query syntax. The - query must be non-empty. + Optional. The query string in search query syntax. An + empty query string will result in all data assets (in + the specified scope) that the user has access to. Query strings can be simple as "x" or more qualified as: @@ -248,7 +248,6 @@ async def search_catalog( This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -278,7 +277,6 @@ async def search_catalog( # If we have keyword arguments corresponding to fields on the # request, apply these. - if scope is not None: request.scope = scope if query is not None: @@ -292,7 +290,9 @@ async def search_catalog( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -349,24 +349,25 @@ async def create_entry_group( The request object. Request message for [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup]. parent (:class:`str`): - Required. The name of the project this entry group is - in. Example: + Required. The name of the project this entry group + belongs to. Example: - - projects/{project_id}/locations/{location} + ``projects/{project_id}/locations/{location}`` - Note that this EntryGroup and its child resources may - not actually be stored in the location in this name. + Note: The entry group itself and its child resources + might not be stored in the location specified in its + name. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. entry_group_id (:class:`str`): - Required. The id of the entry group - to create. The id must begin with a - letter or underscore, contain only - English letters, numbers and - underscores, and be at most 64 - characters. + Required. The ID of the entry group to create. + + The ID must contain only letters (a-z, A-Z), numbers + (0-9), underscores (_), and must start with a letter or + underscore. The maximum size is 64 bytes when encoded in + UTF-8. 
This corresponds to the ``entry_group_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -378,7 +379,6 @@ async def create_entry_group( This corresponds to the ``entry_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -407,7 +407,6 @@ async def create_entry_group( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if entry_group_id is not None: @@ -440,7 +439,7 @@ async def get_entry_group( request: datacatalog.GetEntryGroupRequest = None, *, name: str = None, - read_mask: field_mask.FieldMask = None, + read_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -465,7 +464,6 @@ async def get_entry_group( This corresponds to the ``read_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -494,7 +492,6 @@ async def get_entry_group( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name if read_mask is not None: @@ -508,7 +505,9 @@ async def get_entry_group( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -532,7 +531,7 @@ async def update_entry_group( request: datacatalog.UpdateEntryGroupRequest = None, *, entry_group: datacatalog.EntryGroup = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -555,14 +554,17 @@ async def update_entry_group( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The fields to update on the entry - group. If absent or empty, all - modifiable fields are updated. + Names of fields whose values to + overwrite on an entry group. + If this parameter is absent or empty, + all modifiable fields are overwritten. + If such fields are non-required and + omitted in the request body, their + values are emptied. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -591,7 +593,6 @@ async def update_entry_group( # If we have keyword arguments corresponding to fields on the # request, apply these. - if entry_group is not None: request.entry_group = entry_group if update_mask is not None: @@ -646,7 +647,6 @@ async def delete_entry_group( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -667,7 +667,6 @@ async def delete_entry_group( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -715,7 +714,6 @@ async def list_entry_groups( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -745,7 +743,6 @@ async def list_entry_groups( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -757,7 +754,9 @@ async def list_entry_groups( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -793,8 +792,8 @@ async def create_entry( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datacatalog.Entry: - r"""Creates an entry. Only entries of 'FILESET' type or - user-specified type can be created. + r"""Creates an entry. Only entries of types 'FILESET', 'CLUSTER', + 'DATA_STREAM' or with a user-specified type can be created. Users should enable the Data Catalog API in the project identified by the ``parent`` parameter (see [Data Catalog @@ -809,20 +808,23 @@ async def create_entry( The request object. Request message for [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry]. parent (:class:`str`): - Required. The name of the entry group this entry is in. - Example: + Required. The name of the entry group this entry belongs + to. 
Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}`` - Note that this Entry and its child resources may not - actually be stored in the location in this name. + Note: The entry itself and its child resources might not + be stored in the location specified in its name. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. entry_id (:class:`str`): - Required. The id of the entry to - create. + Required. The ID of the entry to create. + + The ID must contain only letters (a-z, A-Z), numbers + (0-9), and underscores (_). The maximum size is 64 bytes + when encoded in UTF-8. This corresponds to the ``entry_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -832,7 +834,6 @@ async def create_entry( This corresponds to the ``entry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -869,7 +870,6 @@ async def create_entry( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if entry_id is not None: @@ -902,7 +902,7 @@ async def update_entry( request: datacatalog.UpdateEntryRequest = None, *, entry: datacatalog.Entry = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -925,8 +925,12 @@ async def update_entry( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The fields to update on the entry. If absent or empty, - all modifiable fields are updated. 
+ Names of fields whose values to overwrite on an entry. + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied. The following fields are modifiable: @@ -934,7 +938,7 @@ async def update_entry( - ``schema`` - - For entries with type ``FILESET`` + - For entries with type ``FILESET``: - ``schema`` - ``display_name`` @@ -942,20 +946,19 @@ async def update_entry( - ``gcs_fileset_spec`` - ``gcs_fileset_spec.file_patterns`` - - For entries with ``user_specified_type`` + - For entries with ``user_specified_type``: - ``schema`` - ``display_name`` - ``description`` - - user_specified_type - - user_specified_system - - linked_resource - - source_system_timestamps + - ``user_specified_type`` + - ``user_specified_system`` + - ``linked_resource`` + - ``source_system_timestamps`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -992,7 +995,6 @@ async def update_entry( # If we have keyword arguments corresponding to fields on the # request, apply these. - if entry is not None: request.entry = entry if update_mask is not None: @@ -1049,7 +1051,6 @@ async def delete_entry( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1070,7 +1071,6 @@ async def delete_entry( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name @@ -1116,7 +1116,6 @@ async def get_entry( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1153,7 +1152,6 @@ async def get_entry( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1165,7 +1163,9 @@ async def get_entry( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -1201,7 +1201,6 @@ async def lookup_entry( request (:class:`google.cloud.datacatalog_v1.types.LookupEntryRequest`): The request object. Request message for [LookupEntry][google.cloud.datacatalog.v1.DataCatalog.LookupEntry]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1225,7 +1224,6 @@ async def lookup_entry( """ # Create or coerce a protobuf request object. - request = datacatalog.LookupEntryRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1236,7 +1234,9 @@ async def lookup_entry( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -1273,7 +1273,6 @@ async def list_entries( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -1303,7 +1302,6 @@ async def list_entries( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1315,7 +1313,9 @@ async def list_entries( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -1374,8 +1374,12 @@ async def create_tag_template( on the ``request`` instance; if ``request`` is provided, this should not be set. tag_template_id (:class:`str`): - Required. The id of the tag template - to create. + Required. The ID of the tag template to create. + + The ID must contain only lowercase letters (a-z), + numbers (0-9), or underscores (_), and must start with a + letter or underscore. The maximum size is 64 bytes when + encoded in UTF-8. This corresponds to the ``tag_template_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -1385,7 +1389,6 @@ async def create_tag_template( This corresponds to the ``tag_template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1419,7 +1422,6 @@ async def create_tag_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if tag_template_id is not None: @@ -1470,7 +1472,6 @@ async def get_tag_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -1504,7 +1505,6 @@ async def get_tag_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1533,7 +1533,7 @@ async def update_tag_template( request: datacatalog.UpdateTagTemplateRequest = None, *, tag_template: tags.TagTemplate = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1560,20 +1560,18 @@ async def update_tag_template( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The field mask specifies the parts of the template to - overwrite. - - Allowed fields: - - - ``display_name`` + Names of fields whose values to overwrite on a tag + template. Currently, only ``display_name`` can be + overwritten. - If absent or empty, all of the allowed fields above will - be updated. + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1607,7 +1605,6 @@ async def update_tag_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if tag_template is not None: request.tag_template = tag_template if update_mask is not None: @@ -1673,7 +1670,6 @@ async def delete_tag_template( This corresponds to the ``force`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -1694,7 +1690,6 @@ async def delete_tag_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if force is not None: @@ -1754,7 +1749,11 @@ async def create_tag_template_field( should not be set. tag_template_field_id (:class:`str`): Required. The ID of the tag template field to create. - Field ids can contain letters (both uppercase and + + Note: Adding a required field to an existing template is + *not* allowed. + + Field IDs can contain letters (both uppercase and lowercase), numbers (0-9), underscores (_) and dashes (-). Field IDs must be at least 1 character long and at most 128 characters long. Field IDs must also be unique @@ -1770,7 +1769,6 @@ async def create_tag_template_field( This corresponds to the ``tag_template_field`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1797,7 +1795,6 @@ async def create_tag_template_field( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if tag_template_field_id is not None: @@ -1831,7 +1828,7 @@ async def update_tag_template_field( *, name: str = None, tag_template_field: tags.TagTemplateField = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1861,26 +1858,28 @@ async def update_tag_template_field( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Optional. The field mask specifies the parts of the - template to be updated. Allowed fields: + Optional. 
Names of fields whose values to overwrite on + an individual field of a tag template. The following + fields are modifiable: - ``display_name`` - ``type.enum_type`` - ``is_required`` - If ``update_mask`` is not set or empty, all of the - allowed fields above will be updated. + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied with one exception: when updating an enum type, + the provided values are merged with the existing values. + Therefore, enum values can only be added, existing enum + values cannot be deleted or renamed. - When updating an enum type, the provided values will be - merged with the existing values. Therefore, enum values - can only be added, existing enum values cannot be - deleted nor renamed. Updating a template field from - optional to required is NOT allowed. + Additionally, updating a template field from optional to + required is *not* allowed. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1907,7 +1906,6 @@ async def update_tag_template_field( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if tag_template_field is not None: @@ -1970,7 +1968,6 @@ async def rename_tag_template_field( This corresponds to the ``new_tag_template_field_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1997,7 +1994,6 @@ async def rename_tag_template_field( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name if new_tag_template_field_id is not None: @@ -2023,6 +2019,89 @@ async def rename_tag_template_field( # Done; return the response. return response + async def rename_tag_template_field_enum_value( + self, + request: datacatalog.RenameTagTemplateFieldEnumValueRequest = None, + *, + name: str = None, + new_enum_value_display_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Renames an enum value in a tag template. The enum + values have to be unique within one enum field. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.RenameTagTemplateFieldEnumValueRequest`): + The request object. Request message for + [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. + name (:class:`str`): + Required. The name of the enum field value. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_enum_value_display_name (:class:`str`): + Required. The new display name of the enum value. For + example, ``my_new_enum_value``. + + This corresponds to the ``new_enum_value_display_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_enum_value_display_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datacatalog.RenameTagTemplateFieldEnumValueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_enum_value_display_name is not None: + request.new_enum_value_display_name = new_enum_value_display_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rename_tag_template_field_enum_value, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + async def delete_tag_template_field( self, request: datacatalog.DeleteTagTemplateFieldRequest = None, @@ -2062,7 +2141,6 @@ async def delete_tag_template_field( This corresponds to the ``force`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2083,7 +2161,6 @@ async def delete_tag_template_field( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name if force is not None: @@ -2131,12 +2208,13 @@ async def create_tag( [CreateTag][google.cloud.datacatalog.v1.DataCatalog.CreateTag]. parent (:class:`str`): Required. The name of the resource to attach this tag - to. Tags can be attached to Entries. Example: + to. Tags can be attached to entries. An entry can have + up to 1000 attached tags. Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}`` - Note that this Tag and its child resources may not - actually be stored in the location in this name. + Note: The tag and its child resources might not be + stored in the location specified in its name. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2146,7 +2224,6 @@ async def create_tag( This corresponds to the ``tag`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2179,7 +2256,6 @@ async def create_tag( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if tag is not None: @@ -2210,7 +2286,7 @@ async def update_tag( request: datacatalog.UpdateTagRequest = None, *, tag: tags.Tag = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -2229,14 +2305,18 @@ async def update_tag( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The fields to update on the Tag. 
If absent or empty, all - modifiable fields are updated. Currently the only - modifiable field is the field ``fields``. + Names of fields whose values to overwrite on a tag. + Currently, a tag has the only modifiable field with the + name ``fields``. + + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2269,7 +2349,6 @@ async def update_tag( # If we have keyword arguments corresponding to fields on the # request, apply these. - if tag is not None: request.tag = tag if update_mask is not None: @@ -2318,7 +2397,6 @@ async def delete_tag( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2339,7 +2417,6 @@ async def delete_tag( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2391,7 +2468,6 @@ async def list_tags( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2421,7 +2497,6 @@ async def list_tags( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -2433,7 +2508,9 @@ async def list_tags( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -2460,13 +2537,13 @@ async def list_tags( async def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, resource: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Sets the access control policy for a resource. Replaces any existing policy. Supported resources are: @@ -2498,7 +2575,6 @@ async def set_iam_policy( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2577,10 +2653,9 @@ async def set_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.SetIamPolicyRequest(**request) - + request = iam_policy_pb2.SetIamPolicyRequest(**request) elif not request: - request = iam_policy.SetIamPolicyRequest(resource=resource,) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
@@ -2604,13 +2679,13 @@ async def set_iam_policy( async def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, resource: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the access control policy for a resource. A ``NOT_FOUND`` error is returned if the resource does not exist. An empty policy is returned if the resource exists but does not have a @@ -2646,7 +2721,6 @@ async def get_iam_policy( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2725,10 +2799,9 @@ async def get_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.GetIamPolicyRequest(**request) - + request = iam_policy_pb2.GetIamPolicyRequest(**request) elif not request: - request = iam_policy.GetIamPolicyRequest(resource=resource,) + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
@@ -2738,7 +2811,9 @@ async def get_iam_policy( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -2759,12 +2834,12 @@ async def get_iam_policy( async def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Returns the caller's permissions on a resource. If the resource does not exist, an empty set of permissions is returned (We don't return a ``NOT_FOUND`` error). @@ -2784,7 +2859,6 @@ async def test_iam_permissions( request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2796,11 +2870,10 @@ async def test_iam_permissions( Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
diff --git a/google/cloud/datacatalog_v1/services/data_catalog/client.py b/google/cloud/datacatalog_v1/services/data_catalog/client.py index ea8551d4..62453c6f 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -34,6 +32,7 @@ from google.cloud.datacatalog_v1.services.data_catalog import pagers from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import data_source from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import gcs_fileset_spec from google.cloud.datacatalog_v1.types import schema @@ -41,10 +40,9 @@ from google.cloud.datacatalog_v1.types import table_spec from google.cloud.datacatalog_v1.types import tags from google.cloud.datacatalog_v1.types import timestamps -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: 
ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DataCatalogGrpcTransport from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport @@ -63,7 +61,7 @@ class DataCatalogClientMeta(type): _transport_registry["grpc_asyncio"] = DataCatalogGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[DataCatalogTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -88,7 +86,8 @@ class DataCatalogClient(metaclass=DataCatalogClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -122,7 +121,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -139,7 +139,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -158,23 +158,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DataCatalogTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - DataCatalogTransport: The transport used by the client instance. + DataCatalogTransport: The transport used by the client + instance. """ return self._transport @staticmethod def entry_path(project: str, location: str, entry_group: str, entry: str,) -> str: - """Return a fully-qualified entry string.""" + """Returns a fully-qualified entry string.""" return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format( project=project, location=location, entry_group=entry_group, entry=entry, ) @staticmethod def parse_entry_path(path: str) -> Dict[str, str]: - """Parse a entry path into its component segments.""" + """Parses a entry path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entries/(?P.+?)$", path, @@ -183,14 +184,14 @@ def parse_entry_path(path: str) -> Dict[str, str]: @staticmethod def entry_group_path(project: str, location: str, entry_group: str,) -> str: - """Return a fully-qualified entry_group string.""" + """Returns a fully-qualified entry_group string.""" return "projects/{project}/locations/{location}/entryGroups/{entry_group}".format( project=project, location=location, entry_group=entry_group, ) @staticmethod def parse_entry_group_path(path: str) -> Dict[str, str]: - """Parse a entry_group path into its component segments.""" + """Parses a entry_group path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)$", path, @@ -201,7 +202,7 @@ def parse_entry_group_path(path: str) -> Dict[str, str]: def tag_path( project: str, location: str, entry_group: str, entry: str, tag: str, ) -> 
str: - """Return a fully-qualified tag string.""" + """Returns a fully-qualified tag string.""" return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format( project=project, location=location, @@ -212,7 +213,7 @@ def tag_path( @staticmethod def parse_tag_path(path: str) -> Dict[str, str]: - """Parse a tag path into its component segments.""" + """Parses a tag path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entries/(?P.+?)/tags/(?P.+?)$", path, @@ -221,14 +222,14 @@ def parse_tag_path(path: str) -> Dict[str, str]: @staticmethod def tag_template_path(project: str, location: str, tag_template: str,) -> str: - """Return a fully-qualified tag_template string.""" + """Returns a fully-qualified tag_template string.""" return "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format( project=project, location=location, tag_template=tag_template, ) @staticmethod def parse_tag_template_path(path: str) -> Dict[str, str]: - """Parse a tag_template path into its component segments.""" + """Parses a tag_template path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/tagTemplates/(?P.+?)$", path, @@ -239,23 +240,49 @@ def parse_tag_template_path(path: str) -> Dict[str, str]: def tag_template_field_path( project: str, location: str, tag_template: str, field: str, ) -> str: - """Return a fully-qualified tag_template_field string.""" + """Returns a fully-qualified tag_template_field string.""" return "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format( project=project, location=location, tag_template=tag_template, field=field, ) @staticmethod def parse_tag_template_field_path(path: str) -> Dict[str, str]: - """Parse a tag_template_field path into its component segments.""" + """Parses a tag_template_field path into its component segments.""" m = re.match( 
r"^projects/(?P.+?)/locations/(?P.+?)/tagTemplates/(?P.+?)/fields/(?P.+?)$", path, ) return m.groupdict() if m else {} + @staticmethod + def tag_template_field_enum_value_path( + project: str, + location: str, + tag_template: str, + tag_template_field_id: str, + enum_value_display_name: str, + ) -> str: + """Returns a fully-qualified tag_template_field_enum_value string.""" + return "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}".format( + project=project, + location=location, + tag_template=tag_template, + tag_template_field_id=tag_template_field_id, + enum_value_display_name=enum_value_display_name, + ) + + @staticmethod + def parse_tag_template_field_enum_value_path(path: str) -> Dict[str, str]: + """Parses a tag_template_field_enum_value path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/tagTemplates/(?P.+?)/fields/(?P.+?)/enumValues/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -268,7 +295,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -279,7 +306,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -290,7 +317,7 @@ def 
parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -301,7 +328,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -315,12 +342,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, DataCatalogTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the data catalog client. + """Instantiates the data catalog client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -375,9 +402,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: @@ -389,12 +417,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -409,8 +439,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -441,7 +471,7 @@ def search_catalog( This is a custom method (https://cloud.google.com/apis/design/custom_methods) and does not return the complete resource, only the resource identifier - and high level fields. Clients can subsequentally call ``Get`` + and high level fields. Clients can subsequently call ``Get`` methods. Note that Data Catalog search queries do not guarantee full @@ -468,8 +498,9 @@ def search_catalog( on the ``request`` instance; if ``request`` is provided, this should not be set. query (str): - Required. The query string in search query syntax. The - query must be non-empty. + Optional. The query string in search query syntax. An + empty query string will result in all data assets (in + the specified scope) that the user has access to. Query strings can be simple as "x" or more qualified as: @@ -486,7 +517,6 @@ def search_catalog( This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -518,10 +548,8 @@ def search_catalog( # there are no flattened fields. if not isinstance(request, datacatalog.SearchCatalogRequest): request = datacatalog.SearchCatalogRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if scope is not None: request.scope = scope if query is not None: @@ -581,24 +609,25 @@ def create_entry_group( The request object. Request message for [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup]. parent (str): - Required. The name of the project this entry group is - in. Example: + Required. The name of the project this entry group + belongs to. Example: - - projects/{project_id}/locations/{location} + ``projects/{project_id}/locations/{location}`` - Note that this EntryGroup and its child resources may - not actually be stored in the location in this name. + Note: The entry group itself and its child resources + might not be stored in the location specified in its + name. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. entry_group_id (str): - Required. The id of the entry group - to create. The id must begin with a - letter or underscore, contain only - English letters, numbers and - underscores, and be at most 64 - characters. + Required. The ID of the entry group to create. + + The ID must contain only letters (a-z, A-Z), numbers + (0-9), underscores (_), and must start with a letter or + underscore. The maximum size is 64 bytes when encoded in + UTF-8. This corresponds to the ``entry_group_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -610,7 +639,6 @@ def create_entry_group( This corresponds to the ``entry_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -641,10 +669,8 @@ def create_entry_group( # there are no flattened fields. if not isinstance(request, datacatalog.CreateEntryGroupRequest): request = datacatalog.CreateEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if entry_group_id is not None: @@ -673,7 +699,7 @@ def get_entry_group( request: datacatalog.GetEntryGroupRequest = None, *, name: str = None, - read_mask: field_mask.FieldMask = None, + read_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -698,7 +724,6 @@ def get_entry_group( This corresponds to the ``read_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -729,10 +754,8 @@ def get_entry_group( # there are no flattened fields. if not isinstance(request, datacatalog.GetEntryGroupRequest): request = datacatalog.GetEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if read_mask is not None: @@ -759,7 +782,7 @@ def update_entry_group( request: datacatalog.UpdateEntryGroupRequest = None, *, entry_group: datacatalog.EntryGroup = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -782,14 +805,17 @@ def update_entry_group( on the ``request`` instance; if ``request`` is provided, this should not be set. 
update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update on the entry - group. If absent or empty, all - modifiable fields are updated. + Names of fields whose values to + overwrite on an entry group. + If this parameter is absent or empty, + all modifiable fields are overwritten. + If such fields are non-required and + omitted in the request body, their + values are emptied. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -820,10 +846,8 @@ def update_entry_group( # there are no flattened fields. if not isinstance(request, datacatalog.UpdateEntryGroupRequest): request = datacatalog.UpdateEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if entry_group is not None: request.entry_group = entry_group if update_mask is not None: @@ -874,7 +898,6 @@ def delete_entry_group( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -897,10 +920,8 @@ def delete_entry_group( # there are no flattened fields. if not isinstance(request, datacatalog.DeleteEntryGroupRequest): request = datacatalog.DeleteEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -944,7 +965,6 @@ def list_entry_groups( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -976,10 +996,8 @@ def list_entry_groups( # there are no flattened fields. if not isinstance(request, datacatalog.ListEntryGroupsRequest): request = datacatalog.ListEntryGroupsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1016,8 +1034,8 @@ def create_entry( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datacatalog.Entry: - r"""Creates an entry. Only entries of 'FILESET' type or - user-specified type can be created. + r"""Creates an entry. Only entries of types 'FILESET', 'CLUSTER', + 'DATA_STREAM' or with a user-specified type can be created. Users should enable the Data Catalog API in the project identified by the ``parent`` parameter (see [Data Catalog @@ -1032,20 +1050,23 @@ def create_entry( The request object. Request message for [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry]. parent (str): - Required. The name of the entry group this entry is in. - Example: + Required. The name of the entry group this entry belongs + to. Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}`` - Note that this Entry and its child resources may not - actually be stored in the location in this name. + Note: The entry itself and its child resources might not + be stored in the location specified in its name. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. entry_id (str): - Required. The id of the entry to - create. + Required. The ID of the entry to create. + + The ID must contain only letters (a-z, A-Z), numbers + (0-9), and underscores (_). The maximum size is 64 bytes + when encoded in UTF-8. 
This corresponds to the ``entry_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -1055,7 +1076,6 @@ def create_entry( This corresponds to the ``entry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1094,10 +1114,8 @@ def create_entry( # there are no flattened fields. if not isinstance(request, datacatalog.CreateEntryRequest): request = datacatalog.CreateEntryRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if entry_id is not None: @@ -1126,7 +1144,7 @@ def update_entry( request: datacatalog.UpdateEntryRequest = None, *, entry: datacatalog.Entry = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1149,8 +1167,12 @@ def update_entry( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update on the entry. If absent or empty, - all modifiable fields are updated. + Names of fields whose values to overwrite on an entry. + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied. 
The following fields are modifiable: @@ -1158,7 +1180,7 @@ def update_entry( - ``schema`` - - For entries with type ``FILESET`` + - For entries with type ``FILESET``: - ``schema`` - ``display_name`` @@ -1166,20 +1188,19 @@ def update_entry( - ``gcs_fileset_spec`` - ``gcs_fileset_spec.file_patterns`` - - For entries with ``user_specified_type`` + - For entries with ``user_specified_type``: - ``schema`` - ``display_name`` - ``description`` - - user_specified_type - - user_specified_system - - linked_resource - - source_system_timestamps + - ``user_specified_type`` + - ``user_specified_system`` + - ``linked_resource`` + - ``source_system_timestamps`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1218,10 +1239,8 @@ def update_entry( # there are no flattened fields. if not isinstance(request, datacatalog.UpdateEntryRequest): request = datacatalog.UpdateEntryRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if entry is not None: request.entry = entry if update_mask is not None: @@ -1274,7 +1293,6 @@ def delete_entry( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1297,10 +1315,8 @@ def delete_entry( # there are no flattened fields. if not isinstance(request, datacatalog.DeleteEntryRequest): request = datacatalog.DeleteEntryRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name @@ -1342,7 +1358,6 @@ def get_entry( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1381,10 +1396,8 @@ def get_entry( # there are no flattened fields. if not isinstance(request, datacatalog.GetEntryRequest): request = datacatalog.GetEntryRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1421,7 +1434,6 @@ def lookup_entry( request (google.cloud.datacatalog_v1.types.LookupEntryRequest): The request object. Request message for [LookupEntry][google.cloud.datacatalog.v1.DataCatalog.LookupEntry]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1445,7 +1457,6 @@ def lookup_entry( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a datacatalog.LookupEntryRequest. # There's no risk of modifying the input as we've already verified @@ -1487,7 +1498,6 @@ def list_entries( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1519,10 +1529,8 @@ def list_entries( # there are no flattened fields. if not isinstance(request, datacatalog.ListEntriesRequest): request = datacatalog.ListEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -1582,8 +1590,12 @@ def create_tag_template( on the ``request`` instance; if ``request`` is provided, this should not be set. tag_template_id (str): - Required. The id of the tag template - to create. + Required. The ID of the tag template to create. + + The ID must contain only lowercase letters (a-z), + numbers (0-9), or underscores (_), and must start with a + letter or underscore. The maximum size is 64 bytes when + encoded in UTF-8. This corresponds to the ``tag_template_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -1593,7 +1605,6 @@ def create_tag_template( This corresponds to the ``tag_template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1629,10 +1640,8 @@ def create_tag_template( # there are no flattened fields. if not isinstance(request, datacatalog.CreateTagTemplateRequest): request = datacatalog.CreateTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if tag_template_id is not None: @@ -1679,7 +1688,6 @@ def get_tag_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1715,10 +1723,8 @@ def get_tag_template( # there are no flattened fields. if not isinstance(request, datacatalog.GetTagTemplateRequest): request = datacatalog.GetTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name @@ -1743,7 +1749,7 @@ def update_tag_template( request: datacatalog.UpdateTagTemplateRequest = None, *, tag_template: tags.TagTemplate = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1770,20 +1776,18 @@ def update_tag_template( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The field mask specifies the parts of the template to - overwrite. - - Allowed fields: + Names of fields whose values to overwrite on a tag + template. Currently, only ``display_name`` can be + overwritten. - - ``display_name`` - - If absent or empty, all of the allowed fields above will - be updated. + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1819,10 +1823,8 @@ def update_tag_template( # there are no flattened fields. if not isinstance(request, datacatalog.UpdateTagTemplateRequest): request = datacatalog.UpdateTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if tag_template is not None: request.tag_template = tag_template if update_mask is not None: @@ -1884,7 +1886,6 @@ def delete_tag_template( This corresponds to the ``force`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -1907,10 +1908,8 @@ def delete_tag_template( # there are no flattened fields. if not isinstance(request, datacatalog.DeleteTagTemplateRequest): request = datacatalog.DeleteTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if force is not None: @@ -1966,7 +1965,11 @@ def create_tag_template_field( should not be set. tag_template_field_id (str): Required. The ID of the tag template field to create. - Field ids can contain letters (both uppercase and + + Note: Adding a required field to an existing template is + *not* allowed. + + Field IDs can contain letters (both uppercase and lowercase), numbers (0-9), underscores (_) and dashes (-). Field IDs must be at least 1 character long and at most 128 characters long. Field IDs must also be unique @@ -1982,7 +1985,6 @@ def create_tag_template_field( This corresponds to the ``tag_template_field`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2011,10 +2013,8 @@ def create_tag_template_field( # there are no flattened fields. if not isinstance(request, datacatalog.CreateTagTemplateFieldRequest): request = datacatalog.CreateTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent if tag_template_field_id is not None: @@ -2046,7 +2046,7 @@ def update_tag_template_field( *, name: str = None, tag_template_field: tags.TagTemplateField = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -2076,26 +2076,28 @@ def update_tag_template_field( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. The field mask specifies the parts of the - template to be updated. Allowed fields: + Optional. Names of fields whose values to overwrite on + an individual field of a tag template. The following + fields are modifiable: - ``display_name`` - ``type.enum_type`` - ``is_required`` - If ``update_mask`` is not set or empty, all of the - allowed fields above will be updated. + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied with one exception: when updating an enum type, + the provided values are merged with the existing values. + Therefore, enum values can only be added, existing enum + values cannot be deleted or renamed. - When updating an enum type, the provided values will be - merged with the existing values. Therefore, enum values - can only be added, existing enum values cannot be - deleted nor renamed. Updating a template field from - optional to required is NOT allowed. + Additionally, updating a template field from optional to + required is *not* allowed. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2124,10 +2126,8 @@ def update_tag_template_field( # there are no flattened fields. if not isinstance(request, datacatalog.UpdateTagTemplateFieldRequest): request = datacatalog.UpdateTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if tag_template_field is not None: @@ -2188,7 +2188,6 @@ def rename_tag_template_field( This corresponds to the ``new_tag_template_field_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2217,10 +2216,8 @@ def rename_tag_template_field( # there are no flattened fields. if not isinstance(request, datacatalog.RenameTagTemplateFieldRequest): request = datacatalog.RenameTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if new_tag_template_field_id is not None: @@ -2244,6 +2241,91 @@ def rename_tag_template_field( # Done; return the response. return response + def rename_tag_template_field_enum_value( + self, + request: datacatalog.RenameTagTemplateFieldEnumValueRequest = None, + *, + name: str = None, + new_enum_value_display_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Renames an enum value in a tag template. The enum + values have to be unique within one enum field. + + Args: + request (google.cloud.datacatalog_v1.types.RenameTagTemplateFieldEnumValueRequest): + The request object. Request message for + [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. + name (str): + Required. The name of the enum field value. 
Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_enum_value_display_name (str): + Required. The new display name of the enum value. For + example, ``my_new_enum_value``. + + This corresponds to the ``new_enum_value_display_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_enum_value_display_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.RenameTagTemplateFieldEnumValueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.RenameTagTemplateFieldEnumValueRequest): + request = datacatalog.RenameTagTemplateFieldEnumValueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if new_enum_value_display_name is not None: + request.new_enum_value_display_name = new_enum_value_display_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.rename_tag_template_field_enum_value + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + def delete_tag_template_field( self, request: datacatalog.DeleteTagTemplateFieldRequest = None, @@ -2283,7 +2365,6 @@ def delete_tag_template_field( This corresponds to the ``force`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2306,10 +2387,8 @@ def delete_tag_template_field( # there are no flattened fields. if not isinstance(request, datacatalog.DeleteTagTemplateFieldRequest): request = datacatalog.DeleteTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if force is not None: @@ -2355,12 +2434,13 @@ def create_tag( [CreateTag][google.cloud.datacatalog.v1.DataCatalog.CreateTag]. parent (str): Required. The name of the resource to attach this tag - to. Tags can be attached to Entries. Example: + to. Tags can be attached to entries. An entry can have + up to 1000 attached tags. 
Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}`` - Note that this Tag and its child resources may not - actually be stored in the location in this name. + Note: The tag and its child resources might not be + stored in the location specified in its name. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2370,7 +2450,6 @@ def create_tag( This corresponds to the ``tag`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2405,10 +2484,8 @@ def create_tag( # there are no flattened fields. if not isinstance(request, datacatalog.CreateTagRequest): request = datacatalog.CreateTagRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if tag is not None: @@ -2435,7 +2512,7 @@ def update_tag( request: datacatalog.UpdateTagRequest = None, *, tag: tags.Tag = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -2454,14 +2531,18 @@ def update_tag( on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update on the Tag. If absent or empty, all - modifiable fields are updated. Currently the only - modifiable field is the field ``fields``. + Names of fields whose values to overwrite on a tag. + Currently, a tag has the only modifiable field with the + name ``fields``. 
+ + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2496,10 +2577,8 @@ def update_tag( # there are no flattened fields. if not isinstance(request, datacatalog.UpdateTagRequest): request = datacatalog.UpdateTagRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if tag is not None: request.tag = tag if update_mask is not None: @@ -2544,7 +2623,6 @@ def delete_tag( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2567,10 +2645,8 @@ def delete_tag( # there are no flattened fields. if not isinstance(request, datacatalog.DeleteTagRequest): request = datacatalog.DeleteTagRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2618,7 +2694,6 @@ def list_tags( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2650,10 +2725,8 @@ def list_tags( # there are no flattened fields. if not isinstance(request, datacatalog.ListTagsRequest): request = datacatalog.ListTagsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -2681,13 +2754,13 @@ def list_tags( def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, resource: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Sets the access control policy for a resource. Replaces any existing policy. Supported resources are: @@ -2719,7 +2792,6 @@ def set_iam_policy( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2798,11 +2870,10 @@ def set_iam_policy( if isinstance(request, dict): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - request = iam_policy.SetIamPolicyRequest(**request) + request = iam_policy_pb2.SetIamPolicyRequest(**request) elif not request: # Null request, just make one. - request = iam_policy.SetIamPolicyRequest() - + request = iam_policy_pb2.SetIamPolicyRequest() if resource is not None: request.resource = resource @@ -2824,13 +2895,13 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, resource: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the access control policy for a resource. A ``NOT_FOUND`` error is returned if the resource does not exist. 
An empty policy is returned if the resource exists but does not have a @@ -2866,7 +2937,6 @@ def get_iam_policy( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2945,11 +3015,10 @@ def get_iam_policy( if isinstance(request, dict): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - request = iam_policy.GetIamPolicyRequest(**request) + request = iam_policy_pb2.GetIamPolicyRequest(**request) elif not request: # Null request, just make one. - request = iam_policy.GetIamPolicyRequest() - + request = iam_policy_pb2.GetIamPolicyRequest() if resource is not None: request.resource = resource @@ -2971,12 +3040,12 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Returns the caller's permissions on a resource. If the resource does not exist, an empty set of permissions is returned (We don't return a ``NOT_FOUND`` error). @@ -2996,7 +3065,6 @@ def test_iam_permissions( request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3008,14 +3076,13 @@ def test_iam_permissions( Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. 
- if isinstance(request, dict): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) elif not request: # Null request, just make one. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/google/cloud/datacatalog_v1/services/data_catalog/pagers.py b/google/cloud/datacatalog_v1/services/data_catalog/pagers.py index 7ce770e2..6fe3eed1 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/pagers.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -119,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -247,7 +245,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -375,7 +373,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -503,7 +501,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py index f3f1cf12..f6669083 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py index 0b3a9142..6f790452 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import tags -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -41,27 +40,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class DataCatalogTransport(abc.ABC): """Abstract transport class for DataCatalog.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "datacatalog.googleapis.com" + def __init__( self, *, - host: str = "datacatalog.googleapis.com", - 
credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -70,7 +83,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -84,29 +97,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -116,7 +176,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -131,7 +193,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -149,7 +213,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -170,7 +236,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -182,7 +250,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -194,7 +264,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -227,6 +299,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + 
self.rename_tag_template_field_enum_value: gapic_v1.method.wrap_method( + self.rename_tag_template_field_enum_value, + default_timeout=None, + client_info=client_info, + ), self.delete_tag_template_field: gapic_v1.method.wrap_method( self.delete_tag_template_field, default_timeout=None, @@ -247,7 +324,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -262,7 +341,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -278,11 +359,11 @@ def _prep_wrapped_messages(self, client_info): @property def search_catalog( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.SearchCatalogRequest], - typing.Union[ + Union[ datacatalog.SearchCatalogResponse, - typing.Awaitable[datacatalog.SearchCatalogResponse], + Awaitable[datacatalog.SearchCatalogResponse], ], ]: raise NotImplementedError() @@ -290,47 +371,47 @@ def search_catalog( @property def create_entry_group( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.CreateEntryGroupRequest], - typing.Union[datacatalog.EntryGroup, typing.Awaitable[datacatalog.EntryGroup]], + Union[datacatalog.EntryGroup, Awaitable[datacatalog.EntryGroup]], ]: raise NotImplementedError() @property def get_entry_group( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.GetEntryGroupRequest], - typing.Union[datacatalog.EntryGroup, typing.Awaitable[datacatalog.EntryGroup]], + Union[datacatalog.EntryGroup, Awaitable[datacatalog.EntryGroup]], ]: raise NotImplementedError() @property def update_entry_group( self, - ) -> typing.Callable[ + ) -> Callable[ 
[datacatalog.UpdateEntryGroupRequest], - typing.Union[datacatalog.EntryGroup, typing.Awaitable[datacatalog.EntryGroup]], + Union[datacatalog.EntryGroup, Awaitable[datacatalog.EntryGroup]], ]: raise NotImplementedError() @property def delete_entry_group( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.DeleteEntryGroupRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_entry_groups( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.ListEntryGroupsRequest], - typing.Union[ + Union[ datacatalog.ListEntryGroupsResponse, - typing.Awaitable[datacatalog.ListEntryGroupsResponse], + Awaitable[datacatalog.ListEntryGroupsResponse], ], ]: raise NotImplementedError() @@ -338,56 +419,55 @@ def list_entry_groups( @property def create_entry( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.CreateEntryRequest], - typing.Union[datacatalog.Entry, typing.Awaitable[datacatalog.Entry]], + Union[datacatalog.Entry, Awaitable[datacatalog.Entry]], ]: raise NotImplementedError() @property def update_entry( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.UpdateEntryRequest], - typing.Union[datacatalog.Entry, typing.Awaitable[datacatalog.Entry]], + Union[datacatalog.Entry, Awaitable[datacatalog.Entry]], ]: raise NotImplementedError() @property def delete_entry( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.DeleteEntryRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def get_entry( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.GetEntryRequest], - typing.Union[datacatalog.Entry, typing.Awaitable[datacatalog.Entry]], + Union[datacatalog.Entry, Awaitable[datacatalog.Entry]], ]: raise NotImplementedError() @property def lookup_entry( self, - ) -> typing.Callable[ + ) -> Callable[ 
[datacatalog.LookupEntryRequest], - typing.Union[datacatalog.Entry, typing.Awaitable[datacatalog.Entry]], + Union[datacatalog.Entry, Awaitable[datacatalog.Entry]], ]: raise NotImplementedError() @property def list_entries( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.ListEntriesRequest], - typing.Union[ - datacatalog.ListEntriesResponse, - typing.Awaitable[datacatalog.ListEntriesResponse], + Union[ + datacatalog.ListEntriesResponse, Awaitable[datacatalog.ListEntriesResponse] ], ]: raise NotImplementedError() @@ -395,139 +475,140 @@ def list_entries( @property def create_tag_template( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.CreateTagTemplateRequest], - typing.Union[tags.TagTemplate, typing.Awaitable[tags.TagTemplate]], + Union[tags.TagTemplate, Awaitable[tags.TagTemplate]], ]: raise NotImplementedError() @property def get_tag_template( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.GetTagTemplateRequest], - typing.Union[tags.TagTemplate, typing.Awaitable[tags.TagTemplate]], + Union[tags.TagTemplate, Awaitable[tags.TagTemplate]], ]: raise NotImplementedError() @property def update_tag_template( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.UpdateTagTemplateRequest], - typing.Union[tags.TagTemplate, typing.Awaitable[tags.TagTemplate]], + Union[tags.TagTemplate, Awaitable[tags.TagTemplate]], ]: raise NotImplementedError() @property def delete_tag_template( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.DeleteTagTemplateRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def create_tag_template_field( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.CreateTagTemplateFieldRequest], - typing.Union[tags.TagTemplateField, typing.Awaitable[tags.TagTemplateField]], + Union[tags.TagTemplateField, Awaitable[tags.TagTemplateField]], ]: raise NotImplementedError() @property def 
update_tag_template_field( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.UpdateTagTemplateFieldRequest], - typing.Union[tags.TagTemplateField, typing.Awaitable[tags.TagTemplateField]], + Union[tags.TagTemplateField, Awaitable[tags.TagTemplateField]], ]: raise NotImplementedError() @property def rename_tag_template_field( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.RenameTagTemplateFieldRequest], - typing.Union[tags.TagTemplateField, typing.Awaitable[tags.TagTemplateField]], + Union[tags.TagTemplateField, Awaitable[tags.TagTemplateField]], + ]: + raise NotImplementedError() + + @property + def rename_tag_template_field_enum_value( + self, + ) -> Callable[ + [datacatalog.RenameTagTemplateFieldEnumValueRequest], + Union[tags.TagTemplateField, Awaitable[tags.TagTemplateField]], ]: raise NotImplementedError() @property def delete_tag_template_field( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.DeleteTagTemplateFieldRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def create_tag( self, - ) -> typing.Callable[ - [datacatalog.CreateTagRequest], - typing.Union[tags.Tag, typing.Awaitable[tags.Tag]], - ]: + ) -> Callable[[datacatalog.CreateTagRequest], Union[tags.Tag, Awaitable[tags.Tag]]]: raise NotImplementedError() @property def update_tag( self, - ) -> typing.Callable[ - [datacatalog.UpdateTagRequest], - typing.Union[tags.Tag, typing.Awaitable[tags.Tag]], - ]: + ) -> Callable[[datacatalog.UpdateTagRequest], Union[tags.Tag, Awaitable[tags.Tag]]]: raise NotImplementedError() @property def delete_tag( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.DeleteTagRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_tags( self, - ) -> typing.Callable[ + ) -> Callable[ 
[datacatalog.ListTagsRequest], - typing.Union[ - datacatalog.ListTagsResponse, typing.Awaitable[datacatalog.ListTagsResponse] - ], + Union[datacatalog.ListTagsResponse, Awaitable[datacatalog.ListTagsResponse]], ]: raise NotImplementedError() @property def set_iam_policy( self, - ) -> typing.Callable[ - [iam_policy.SetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @property def get_iam_policy( self, - ) -> typing.Callable[ - [iam_policy.GetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @property def test_iam_permissions( self, - ) -> typing.Callable[ - [iam_policy.TestIamPermissionsRequest], - typing.Union[ - iam_policy.TestIamPermissionsResponse, - typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ], ]: raise NotImplementedError() diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py index efca3a63..23befb3b 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import tags -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO @@ -55,7 +52,7 @@ def __init__( self, *, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -69,7 +66,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -179,7 +177,7 @@ def __init__( def create_channel( cls, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -210,13 +208,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -240,7 +240,7 @@ def search_catalog( This is a custom method (https://cloud.google.com/apis/design/custom_methods) and does not return the complete resource, only the resource identifier - and high level fields. Clients can subsequentally call ``Get`` + and high level fields. Clients can subsequently call ``Get`` methods. Note that Data Catalog search queries do not guarantee full @@ -375,7 +375,7 @@ def update_entry_group( @property def delete_entry_group( self, - ) -> Callable[[datacatalog.DeleteEntryGroupRequest], empty.Empty]: + ) -> Callable[[datacatalog.DeleteEntryGroupRequest], empty_pb2.Empty]: r"""Return a callable for the delete entry group method over gRPC. Deletes an EntryGroup. 
Only entry groups that do not contain @@ -399,7 +399,7 @@ def delete_entry_group( self._stubs["delete_entry_group"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/DeleteEntryGroup", request_serializer=datacatalog.DeleteEntryGroupRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_entry_group"] @@ -437,8 +437,8 @@ def create_entry( ) -> Callable[[datacatalog.CreateEntryRequest], datacatalog.Entry]: r"""Return a callable for the create entry method over gRPC. - Creates an entry. Only entries of 'FILESET' type or - user-specified type can be created. + Creates an entry. Only entries of types 'FILESET', 'CLUSTER', + 'DATA_STREAM' or with a user-specified type can be created. Users should enable the Data Catalog API in the project identified by the ``parent`` parameter (see [Data Catalog @@ -497,7 +497,9 @@ def update_entry( return self._stubs["update_entry"] @property - def delete_entry(self) -> Callable[[datacatalog.DeleteEntryRequest], empty.Empty]: + def delete_entry( + self, + ) -> Callable[[datacatalog.DeleteEntryRequest], empty_pb2.Empty]: r"""Return a callable for the delete entry method over gRPC. Deletes an existing entry. 
Only entries created through @@ -522,7 +524,7 @@ def delete_entry(self) -> Callable[[datacatalog.DeleteEntryRequest], empty.Empty self._stubs["delete_entry"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/DeleteEntry", request_serializer=datacatalog.DeleteEntryRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_entry"] @@ -698,7 +700,7 @@ def update_tag_template( @property def delete_tag_template( self, - ) -> Callable[[datacatalog.DeleteTagTemplateRequest], empty.Empty]: + ) -> Callable[[datacatalog.DeleteTagTemplateRequest], empty_pb2.Empty]: r"""Return a callable for the delete tag template method over gRPC. Deletes a tag template and all tags using the template. Users @@ -721,7 +723,7 @@ def delete_tag_template( self._stubs["delete_tag_template"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/DeleteTagTemplate", request_serializer=datacatalog.DeleteTagTemplateRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_tag_template"] @@ -816,10 +818,42 @@ def rename_tag_template_field( ) return self._stubs["rename_tag_template_field"] + @property + def rename_tag_template_field_enum_value( + self, + ) -> Callable[ + [datacatalog.RenameTagTemplateFieldEnumValueRequest], tags.TagTemplateField + ]: + r"""Return a callable for the rename tag template field enum + value method over gRPC. + + Renames an enum value in a tag template. The enum + values have to be unique within one enum field. + + Returns: + Callable[[~.RenameTagTemplateFieldEnumValueRequest], + ~.TagTemplateField]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_tag_template_field_enum_value" not in self._stubs: + self._stubs[ + "rename_tag_template_field_enum_value" + ] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.DataCatalog/RenameTagTemplateFieldEnumValue", + request_serializer=datacatalog.RenameTagTemplateFieldEnumValueRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs["rename_tag_template_field_enum_value"] + @property def delete_tag_template_field( self, - ) -> Callable[[datacatalog.DeleteTagTemplateFieldRequest], empty.Empty]: + ) -> Callable[[datacatalog.DeleteTagTemplateFieldRequest], empty_pb2.Empty]: r"""Return a callable for the delete tag template field method over gRPC. Deletes a field in a tag template and all uses of that field. @@ -843,7 +877,7 @@ def delete_tag_template_field( self._stubs["delete_tag_template_field"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/DeleteTagTemplateField", request_serializer=datacatalog.DeleteTagTemplateFieldRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_tag_template_field"] @@ -901,7 +935,7 @@ def update_tag(self) -> Callable[[datacatalog.UpdateTagRequest], tags.Tag]: return self._stubs["update_tag"] @property - def delete_tag(self) -> Callable[[datacatalog.DeleteTagRequest], empty.Empty]: + def delete_tag(self) -> Callable[[datacatalog.DeleteTagRequest], empty_pb2.Empty]: r"""Return a callable for the delete tag method over gRPC. Deletes a tag. 
@@ -920,7 +954,7 @@ def delete_tag(self) -> Callable[[datacatalog.DeleteTagRequest], empty.Empty]: self._stubs["delete_tag"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/DeleteTag", request_serializer=datacatalog.DeleteTagRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_tag"] @@ -953,7 +987,7 @@ def list_tags( @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the set iam policy method over gRPC. Sets the access control policy for a resource. Replaces any @@ -987,15 +1021,15 @@ def set_iam_policy( if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the get iam policy method over gRPC. Gets the access control policy for a resource. 
A ``NOT_FOUND`` @@ -1033,8 +1067,8 @@ def get_iam_policy( if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @@ -1042,7 +1076,8 @@ def get_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, ]: r"""Return a callable for the test iam permissions method over gRPC. @@ -1074,8 +1109,8 @@ def test_iam_permissions( if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py index 9e712dfe..736a952c 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,25 +13,23 @@ # See the License for the specific language governing 
permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import tags -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO from .grpc import DataCatalogGrpcTransport @@ -58,7 +55,7 @@ class DataCatalogGrpcAsyncIOTransport(DataCatalogTransport): def create_channel( cls, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -85,13 +82,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -99,7 +98,7 @@ def __init__( self, *, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -113,7 +112,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -171,7 +171,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -244,7 +243,7 @@ def search_catalog( This is a custom method (https://cloud.google.com/apis/design/custom_methods) and does not return the complete resource, only the resource identifier - and high level fields. Clients can subsequentally call ``Get`` + and high level fields. Clients can subsequently call ``Get`` methods. Note that Data Catalog search queries do not guarantee full @@ -385,7 +384,7 @@ def update_entry_group( @property def delete_entry_group( self, - ) -> Callable[[datacatalog.DeleteEntryGroupRequest], Awaitable[empty.Empty]]: + ) -> Callable[[datacatalog.DeleteEntryGroupRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete entry group method over gRPC. Deletes an EntryGroup. 
Only entry groups that do not contain @@ -409,7 +408,7 @@ def delete_entry_group( self._stubs["delete_entry_group"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/DeleteEntryGroup", request_serializer=datacatalog.DeleteEntryGroupRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_entry_group"] @@ -448,8 +447,8 @@ def create_entry( ) -> Callable[[datacatalog.CreateEntryRequest], Awaitable[datacatalog.Entry]]: r"""Return a callable for the create entry method over gRPC. - Creates an entry. Only entries of 'FILESET' type or - user-specified type can be created. + Creates an entry. Only entries of types 'FILESET', 'CLUSTER', + 'DATA_STREAM' or with a user-specified type can be created. Users should enable the Data Catalog API in the project identified by the ``parent`` parameter (see [Data Catalog @@ -510,7 +509,7 @@ def update_entry( @property def delete_entry( self, - ) -> Callable[[datacatalog.DeleteEntryRequest], Awaitable[empty.Empty]]: + ) -> Callable[[datacatalog.DeleteEntryRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete entry method over gRPC. Deletes an existing entry. Only entries created through @@ -535,7 +534,7 @@ def delete_entry( self._stubs["delete_entry"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/DeleteEntry", request_serializer=datacatalog.DeleteEntryRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_entry"] @@ -715,7 +714,7 @@ def update_tag_template( @property def delete_tag_template( self, - ) -> Callable[[datacatalog.DeleteTagTemplateRequest], Awaitable[empty.Empty]]: + ) -> Callable[[datacatalog.DeleteTagTemplateRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete tag template method over gRPC. 
Deletes a tag template and all tags using the template. Users @@ -738,7 +737,7 @@ def delete_tag_template( self._stubs["delete_tag_template"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/DeleteTagTemplate", request_serializer=datacatalog.DeleteTagTemplateRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_tag_template"] @@ -839,10 +838,45 @@ def rename_tag_template_field( ) return self._stubs["rename_tag_template_field"] + @property + def rename_tag_template_field_enum_value( + self, + ) -> Callable[ + [datacatalog.RenameTagTemplateFieldEnumValueRequest], + Awaitable[tags.TagTemplateField], + ]: + r"""Return a callable for the rename tag template field enum + value method over gRPC. + + Renames an enum value in a tag template. The enum + values have to be unique within one enum field. + + Returns: + Callable[[~.RenameTagTemplateFieldEnumValueRequest], + Awaitable[~.TagTemplateField]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "rename_tag_template_field_enum_value" not in self._stubs: + self._stubs[ + "rename_tag_template_field_enum_value" + ] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.DataCatalog/RenameTagTemplateFieldEnumValue", + request_serializer=datacatalog.RenameTagTemplateFieldEnumValueRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs["rename_tag_template_field_enum_value"] + @property def delete_tag_template_field( self, - ) -> Callable[[datacatalog.DeleteTagTemplateFieldRequest], Awaitable[empty.Empty]]: + ) -> Callable[ + [datacatalog.DeleteTagTemplateFieldRequest], Awaitable[empty_pb2.Empty] + ]: r"""Return a callable for the delete tag template field method over gRPC. Deletes a field in a tag template and all uses of that field. @@ -866,7 +900,7 @@ def delete_tag_template_field( self._stubs["delete_tag_template_field"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/DeleteTagTemplateField", request_serializer=datacatalog.DeleteTagTemplateFieldRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_tag_template_field"] @@ -930,7 +964,7 @@ def update_tag( @property def delete_tag( self, - ) -> Callable[[datacatalog.DeleteTagRequest], Awaitable[empty.Empty]]: + ) -> Callable[[datacatalog.DeleteTagRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete tag method over gRPC. Deletes a tag. 
@@ -949,7 +983,7 @@ def delete_tag( self._stubs["delete_tag"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/DeleteTag", request_serializer=datacatalog.DeleteTagRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_tag"] @@ -984,7 +1018,7 @@ def list_tags( @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the set iam policy method over gRPC. Sets the access control policy for a resource. Replaces any @@ -1018,15 +1052,15 @@ def set_iam_policy( if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the get iam policy method over gRPC. Gets the access control policy for a resource. 
A ``NOT_FOUND`` @@ -1064,8 +1098,8 @@ def get_iam_policy( if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @@ -1073,8 +1107,8 @@ def get_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], - Awaitable[iam_policy.TestIamPermissionsResponse], + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ]: r"""Return a callable for the test iam permissions method over gRPC. @@ -1106,8 +1140,8 @@ def test_iam_permissions( if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1.DataCatalog/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py new file mode 100644 index 00000000..031ae9fa --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import PolicyTagManagerClient +from .async_client import PolicyTagManagerAsyncClient + +__all__ = ( + "PolicyTagManagerClient", + "PolicyTagManagerAsyncClient", +) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py new file mode 100644 index 00000000..e57f8ecb --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py @@ -0,0 +1,1246 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import timestamps +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport +from .client import PolicyTagManagerClient + + +class PolicyTagManagerAsyncClient: + """Policy Tag Manager API service allows clients to manage their + policy tags and taxonomies. + + Policy tags are used to tag BigQuery columns and apply + additional access control policies. A taxonomy is a hierarchical + grouping of policy tags that classify data along a common axis. 
+ """ + + _client: PolicyTagManagerClient + + DEFAULT_ENDPOINT = PolicyTagManagerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PolicyTagManagerClient.DEFAULT_MTLS_ENDPOINT + + policy_tag_path = staticmethod(PolicyTagManagerClient.policy_tag_path) + parse_policy_tag_path = staticmethod(PolicyTagManagerClient.parse_policy_tag_path) + taxonomy_path = staticmethod(PolicyTagManagerClient.taxonomy_path) + parse_taxonomy_path = staticmethod(PolicyTagManagerClient.parse_taxonomy_path) + common_billing_account_path = staticmethod( + PolicyTagManagerClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PolicyTagManagerClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PolicyTagManagerClient.common_folder_path) + parse_common_folder_path = staticmethod( + PolicyTagManagerClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + PolicyTagManagerClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PolicyTagManagerClient.parse_common_organization_path + ) + common_project_path = staticmethod(PolicyTagManagerClient.common_project_path) + parse_common_project_path = staticmethod( + PolicyTagManagerClient.parse_common_project_path + ) + common_location_path = staticmethod(PolicyTagManagerClient.common_location_path) + parse_common_location_path = staticmethod( + PolicyTagManagerClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerAsyncClient: The constructed client. 
+ """ + return PolicyTagManagerClient.from_service_account_info.__func__(PolicyTagManagerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerAsyncClient: The constructed client. + """ + return PolicyTagManagerClient.from_service_account_file.__func__(PolicyTagManagerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PolicyTagManagerTransport: + """Returns the transport used by the client instance. + + Returns: + PolicyTagManagerTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(PolicyTagManagerClient).get_transport_class, type(PolicyTagManagerClient) + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, PolicyTagManagerTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the policy tag manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.PolicyTagManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PolicyTagManagerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_taxonomy( + self, + request: policytagmanager.CreateTaxonomyRequest = None, + *, + parent: str = None, + taxonomy: policytagmanager.Taxonomy = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Creates a taxonomy in a specified project. The + taxonomy is initially empty, i.e., does not contain + policy tags. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.CreateTaxonomyRequest`): + The request object. Request message for + [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy]. + parent (:class:`str`): + Required. Resource name of the + project that the taxonomy will belong + to. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + taxonomy (:class:`google.cloud.datacatalog_v1.types.Taxonomy`): + The taxonomy to be created. + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. For instance a "data + sensitivity" taxonomy could contain the following + policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy could contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, taxonomy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.CreateTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_taxonomy( + self, + request: policytagmanager.DeleteTaxonomyRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a taxonomy. This method will also delete all + policy tags in this taxonomy, their associated policies, + and the policy tags references from BigQuery columns. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.DeleteTaxonomyRequest`): + The request object. Request message for + [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy]. + name (:class:`str`): + Required. Resource name of the + taxonomy to be deleted. All policy tags + in this taxonomy will also be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.DeleteTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def update_taxonomy( + self, + request: policytagmanager.UpdateTaxonomyRequest = None, + *, + taxonomy: policytagmanager.Taxonomy = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Updates a taxonomy. This method can update the + taxonomy's display name, description, and activated + policy types. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.UpdateTaxonomyRequest`): + The request object. Request message for + [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy]. + taxonomy (:class:`google.cloud.datacatalog_v1.types.Taxonomy`): + The taxonomy to update. Only description, display_name, + and activated policy types can be updated. + + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. For instance a "data + sensitivity" taxonomy could contain the following + policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy could contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([taxonomy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.UpdateTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("taxonomy.name", request.taxonomy.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def list_taxonomies( + self, + request: policytagmanager.ListTaxonomiesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTaxonomiesAsyncPager: + r"""Lists all taxonomies in a project in a particular + location that the caller has permission to view. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.ListTaxonomiesRequest`): + The request object. Request message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + parent (:class:`str`): + Required. Resource name of the + project to list the taxonomies of. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListTaxonomiesAsyncPager: + Response message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.ListTaxonomiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_taxonomies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTaxonomiesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_taxonomy( + self, + request: policytagmanager.GetTaxonomyRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Gets a taxonomy. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.GetTaxonomyRequest`): + The request object. Request message for + [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. + name (:class:`str`): + Required. Resource name of the + requested taxonomy. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. 
For instance a "data + sensitivity" taxonomy could contain the following + policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy could contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.GetTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_policy_tag( + self, + request: policytagmanager.CreatePolicyTagRequest = None, + *, + parent: str = None, + policy_tag: policytagmanager.PolicyTag = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Creates a policy tag in a taxonomy. 
+ + Args: + request (:class:`google.cloud.datacatalog_v1.types.CreatePolicyTagRequest`): + The request object. Request message for + [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. + parent (:class:`str`): + Required. Resource name of the + taxonomy that the policy tag will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + policy_tag (:class:`google.cloud.datacatalog_v1.types.PolicyTag`): + The policy tag to be created. + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider the following + hierarchy: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Policy tag "Geolocation" contains 3 child policy + tags: "LatLong", "City", and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, policy_tag]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.CreatePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_policy_tag( + self, + request: policytagmanager.DeletePolicyTagRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a policy tag. This method also deletes: + + - all of its descendant policy tags, if any + - the policies associated with the policy tag and its + descendants + - references from BigQuery table schema of the policy tag and + its descendants. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.DeletePolicyTagRequest`): + The request object. Request message for + [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. + name (:class:`str`): + Required. Resource name of the policy + tag to be deleted. All of its descendant + policy tags will also be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.DeletePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def update_policy_tag( + self, + request: policytagmanager.UpdatePolicyTagRequest = None, + *, + policy_tag: policytagmanager.PolicyTag = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Updates a policy tag. This method can update the + policy tag's display name, description, and parent + policy tag. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.UpdatePolicyTagRequest`): + The request object. Request message for + [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. + policy_tag (:class:`google.cloud.datacatalog_v1.types.PolicyTag`): + The policy tag to update. Only the description, + display_name, and parent_policy_tag fields can be + updated. 
+ + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider the following + hierarchy: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Policy tag "Geolocation" contains 3 child policy + tags: "LatLong", "City", and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([policy_tag]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.UpdatePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("policy_tag.name", request.policy_tag.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def list_policy_tags( + self, + request: policytagmanager.ListPolicyTagsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPolicyTagsAsyncPager: + r"""Lists all policy tags in a taxonomy. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.ListPolicyTagsRequest`): + The request object. Request message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + parent (:class:`str`): + Required. Resource name of the + taxonomy to list the policy tags of. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListPolicyTagsAsyncPager: + Response message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.ListPolicyTagsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_policy_tags, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPolicyTagsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_policy_tag( + self, + request: policytagmanager.GetPolicyTagRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Gets a policy tag. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.GetPolicyTagRequest`): + The request object. Request message for + [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. + name (:class:`str`): + Required. Resource name of the + requested policy tag. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy (e.g. ssn). 
Policy tags can be defined + in a hierarchy. For example, consider the following + hierarchy: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Policy tag "Geolocation" contains 3 child policy + tags: "LatLong", "City", and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = policytagmanager.GetPolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM policy for a policy tag or a taxonomy. + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ 
https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM policy for a policy tag or a taxonomy. + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. 
Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns the permissions that a caller has on a + specified policy tag or taxonomy. + + Args: + request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datacatalog", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("PolicyTagManagerAsyncClient",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py new file mode 100644 index 00000000..0b852753 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py @@ -0,0 +1,1447 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import timestamps +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import PolicyTagManagerGrpcTransport +from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport + + +class PolicyTagManagerClientMeta(type): + """Metaclass for the PolicyTagManager client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[PolicyTagManagerTransport]] + _transport_registry["grpc"] = PolicyTagManagerGrpcTransport + _transport_registry["grpc_asyncio"] = PolicyTagManagerGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[PolicyTagManagerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PolicyTagManagerClient(metaclass=PolicyTagManagerClientMeta): + """Policy Tag Manager API service allows clients to manage their + policy tags and taxonomies. + + Policy tags are used to tag BigQuery columns and apply + additional access control policies. A taxonomy is a hierarchical + grouping of policy tags that classify data along a common axis. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "datacatalog.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PolicyTagManagerTransport: + """Returns the transport used by the client instance. + + Returns: + PolicyTagManagerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def policy_tag_path( + project: str, location: str, taxonomy: str, policy_tag: str, + ) -> str: + """Returns a fully-qualified policy_tag string.""" + return "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format( + project=project, + location=location, + taxonomy=taxonomy, + policy_tag=policy_tag, + ) + + @staticmethod + def parse_policy_tag_path(path: str) -> Dict[str, str]: + """Parses a policy_tag path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)/policyTags/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def taxonomy_path(project: str, location: str, taxonomy: str,) -> str: + """Returns a fully-qualified taxonomy string.""" + return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( + project=project, location=location, taxonomy=taxonomy, + ) + + @staticmethod + def parse_taxonomy_path(path: str) -> Dict[str, str]: + """Parses a taxonomy path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + 
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str,) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization,)
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str,) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project,)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str,) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project, location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, PolicyTagManagerTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the policy tag manager client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests.
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, PolicyTagManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PolicyTagManagerTransport): + # transport is a PolicyTagManagerTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def create_taxonomy( + self, + request: policytagmanager.CreateTaxonomyRequest = None, + *, + parent: str = None, + taxonomy: policytagmanager.Taxonomy = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Creates a taxonomy in a specified project. The + taxonomy is initially empty, i.e., does not contain + policy tags. + + Args: + request (google.cloud.datacatalog_v1.types.CreateTaxonomyRequest): + The request object. Request message for + [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy]. + parent (str): + Required. Resource name of the + project that the taxonomy will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): + The taxonomy to be created. + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. 
For instance a "data + sensitivity" taxonomy could contain the following + policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy could contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, taxonomy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.CreateTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.CreateTaxonomyRequest): + request = policytagmanager.CreateTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def delete_taxonomy( + self, + request: policytagmanager.DeleteTaxonomyRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a taxonomy. This method will also delete all + policy tags in this taxonomy, their associated policies, + and the policy tags references from BigQuery columns. + + Args: + request (google.cloud.datacatalog_v1.types.DeleteTaxonomyRequest): + The request object. Request message for + [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy]. + name (str): + Required. Resource name of the + taxonomy to be deleted. All policy tags + in this taxonomy will also be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.DeleteTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.DeleteTaxonomyRequest): + request = policytagmanager.DeleteTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def update_taxonomy( + self, + request: policytagmanager.UpdateTaxonomyRequest = None, + *, + taxonomy: policytagmanager.Taxonomy = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Updates a taxonomy. This method can update the + taxonomy's display name, description, and activated + policy types. + + Args: + request (google.cloud.datacatalog_v1.types.UpdateTaxonomyRequest): + The request object. Request message for + [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy]. + taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): + The taxonomy to update. Only description, display_name, + and activated policy types can be updated. + + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. 
For instance a "data + sensitivity" taxonomy could contain the following + policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy could contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([taxonomy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.UpdateTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.UpdateTaxonomyRequest): + request = policytagmanager.UpdateTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("taxonomy.name", request.taxonomy.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def list_taxonomies( + self, + request: policytagmanager.ListTaxonomiesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTaxonomiesPager: + r"""Lists all taxonomies in a project in a particular + location that the caller has permission to view. + + Args: + request (google.cloud.datacatalog_v1.types.ListTaxonomiesRequest): + The request object. Request message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + parent (str): + Required. Resource name of the + project to list the taxonomies of. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListTaxonomiesPager: + Response message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.ListTaxonomiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, policytagmanager.ListTaxonomiesRequest): + request = policytagmanager.ListTaxonomiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTaxonomiesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_taxonomy( + self, + request: policytagmanager.GetTaxonomyRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Gets a taxonomy. + + Args: + request (google.cloud.datacatalog_v1.types.GetTaxonomyRequest): + The request object. Request message for + [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. + name (str): + Required. Resource name of the + requested taxonomy. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. For instance a "data + sensitivity" taxonomy could contain the following + policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy could contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.GetTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.GetTaxonomyRequest): + request = policytagmanager.GetTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def create_policy_tag( + self, + request: policytagmanager.CreatePolicyTagRequest = None, + *, + parent: str = None, + policy_tag: policytagmanager.PolicyTag = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Creates a policy tag in a taxonomy. + + Args: + request (google.cloud.datacatalog_v1.types.CreatePolicyTagRequest): + The request object. Request message for + [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. + parent (str): + Required. Resource name of the + taxonomy that the policy tag will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): + The policy tag to be created. + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider the following + hierarchy: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Policy tag "Geolocation" contains 3 child policy + tags: "LatLong", "City", and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, policy_tag]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.CreatePolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.CreatePolicyTagRequest): + request = policytagmanager.CreatePolicyTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_policy_tag( + self, + request: policytagmanager.DeletePolicyTagRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a policy tag. This method also deletes: + + - all of its descendant policy tags, if any + - the policies associated with the policy tag and its + descendants + - references from BigQuery table schema of the policy tag and + its descendants. + + Args: + request (google.cloud.datacatalog_v1.types.DeletePolicyTagRequest): + The request object. 
Request message for + [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. + name (str): + Required. Resource name of the policy + tag to be deleted. All of its descendant + policy tags will also be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.DeletePolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.DeletePolicyTagRequest): + request = policytagmanager.DeletePolicyTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def update_policy_tag( + self, + request: policytagmanager.UpdatePolicyTagRequest = None, + *, + policy_tag: policytagmanager.PolicyTag = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Updates a policy tag. This method can update the + policy tag's display name, description, and parent + policy tag. + + Args: + request (google.cloud.datacatalog_v1.types.UpdatePolicyTagRequest): + The request object. Request message for + [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. + policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): + The policy tag to update. Only the description, + display_name, and parent_policy_tag fields can be + updated. + + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider the following + hierarchy: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Policy tag "Geolocation" contains 3 child policy + tags: "LatLong", "City", and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([policy_tag]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.UpdatePolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.UpdatePolicyTagRequest): + request = policytagmanager.UpdatePolicyTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("policy_tag.name", request.policy_tag.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_policy_tags( + self, + request: policytagmanager.ListPolicyTagsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPolicyTagsPager: + r"""Lists all policy tags in a taxonomy. + + Args: + request (google.cloud.datacatalog_v1.types.ListPolicyTagsRequest): + The request object. Request message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + parent (str): + Required. Resource name of the + taxonomy to list the policy tags of. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListPolicyTagsPager: + Response message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.ListPolicyTagsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.ListPolicyTagsRequest): + request = policytagmanager.ListPolicyTagsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_policy_tags] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPolicyTagsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_policy_tag( + self, + request: policytagmanager.GetPolicyTagRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Gets a policy tag. + + Args: + request (google.cloud.datacatalog_v1.types.GetPolicyTagRequest): + The request object. Request message for + [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. + name (str): + Required. Resource name of the + requested policy tag. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be defined + in a hierarchy. For example, consider the following + hierarchy: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Policy tag "Geolocation" contains 3 child policy + tags: "LatLong", "City", and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.GetPolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.GetPolicyTagRequest): + request = policytagmanager.GetPolicyTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM policy for a policy tag or a taxonomy. + + Args: + request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. 
+ if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM policy for a policy tag or a taxonomy. + + Args: + request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). 
+ A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns the permissions that a caller has on a + specified policy tag or taxonomy. + + Args: + request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datacatalog", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("PolicyTagManagerClient",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py new file mode 100644 index 00000000..e8f09223 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) + +from google.cloud.datacatalog_v1.types import policytagmanager + + +class ListTaxonomiesPager: + """A pager for iterating through ``list_taxonomies`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``taxonomies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTaxonomies`` requests and continue to iterate + through the ``taxonomies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., policytagmanager.ListTaxonomiesResponse], + request: policytagmanager.ListTaxonomiesRequest, + response: policytagmanager.ListTaxonomiesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListTaxonomiesRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListTaxonomiesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = policytagmanager.ListTaxonomiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[policytagmanager.ListTaxonomiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[policytagmanager.Taxonomy]: + for page in self.pages: + yield from page.taxonomies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTaxonomiesAsyncPager: + """A pager for iterating through ``list_taxonomies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``taxonomies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTaxonomies`` requests and continue to iterate + through the ``taxonomies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[policytagmanager.ListTaxonomiesResponse]], + request: policytagmanager.ListTaxonomiesRequest, + response: policytagmanager.ListTaxonomiesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListTaxonomiesRequest): + The initial request object. 
+ response (google.cloud.datacatalog_v1.types.ListTaxonomiesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = policytagmanager.ListTaxonomiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[policytagmanager.ListTaxonomiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[policytagmanager.Taxonomy]: + async def async_generator(): + async for page in self.pages: + for response in page.taxonomies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPolicyTagsPager: + """A pager for iterating through ``list_policy_tags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``policy_tags`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPolicyTags`` requests and continue to iterate + through the ``policy_tags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., policytagmanager.ListPolicyTagsResponse], + request: policytagmanager.ListPolicyTagsRequest, + response: policytagmanager.ListPolicyTagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListPolicyTagsRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListPolicyTagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = policytagmanager.ListPolicyTagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[policytagmanager.ListPolicyTagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[policytagmanager.PolicyTag]: + for page in self.pages: + yield from page.policy_tags + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPolicyTagsAsyncPager: + """A pager for iterating through ``list_policy_tags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``policy_tags`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPolicyTags`` requests and continue to iterate + through the ``policy_tags`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[policytagmanager.ListPolicyTagsResponse]], + request: policytagmanager.ListPolicyTagsRequest, + response: policytagmanager.ListPolicyTagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListPolicyTagsRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListPolicyTagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = policytagmanager.ListPolicyTagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[policytagmanager.ListPolicyTagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[policytagmanager.PolicyTag]: + async def async_generator(): + async for page in self.pages: + for response in page.policy_tags: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py new 
file mode 100644 index 00000000..a527c57d --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PolicyTagManagerTransport +from .grpc import PolicyTagManagerGrpcTransport +from .grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerTransport]] +_transport_registry["grpc"] = PolicyTagManagerGrpcTransport +_transport_registry["grpc_asyncio"] = PolicyTagManagerGrpcAsyncIOTransport + +__all__ = ( + "PolicyTagManagerTransport", + "PolicyTagManagerGrpcTransport", + "PolicyTagManagerGrpcAsyncIOTransport", +) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py new file mode 100644 index 00000000..773ee5a4 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py @@ -0,0 +1,342 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datacatalog", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + + +class PolicyTagManagerTransport(abc.ABC): + """Abstract transport class for PolicyTagManager.""" + + AUTH_SCOPES = 
("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "datacatalog.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + 
self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_taxonomy: gapic_v1.method.wrap_method( + self.create_taxonomy, default_timeout=None, client_info=client_info, + ), + self.delete_taxonomy: gapic_v1.method.wrap_method( + self.delete_taxonomy, default_timeout=None, client_info=client_info, + ), + self.update_taxonomy: gapic_v1.method.wrap_method( + self.update_taxonomy, default_timeout=None, client_info=client_info, + ), + self.list_taxonomies: gapic_v1.method.wrap_method( + self.list_taxonomies, default_timeout=None, client_info=client_info, + ), + self.get_taxonomy: gapic_v1.method.wrap_method( + self.get_taxonomy, default_timeout=None, client_info=client_info, + ), + self.create_policy_tag: gapic_v1.method.wrap_method( + self.create_policy_tag, default_timeout=None, client_info=client_info, + ), + self.delete_policy_tag: gapic_v1.method.wrap_method( + self.delete_policy_tag, default_timeout=None, client_info=client_info, + ), + self.update_policy_tag: gapic_v1.method.wrap_method( + self.update_policy_tag, default_timeout=None, client_info=client_info, + ), + self.list_policy_tags: gapic_v1.method.wrap_method( + self.list_policy_tags, default_timeout=None, client_info=client_info, + ), + self.get_policy_tag: gapic_v1.method.wrap_method( + self.get_policy_tag, default_timeout=None, client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, default_timeout=None, client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, default_timeout=None, client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + 
client_info=client_info, + ), + } + + @property + def create_taxonomy( + self, + ) -> Callable[ + [policytagmanager.CreateTaxonomyRequest], + Union[policytagmanager.Taxonomy, Awaitable[policytagmanager.Taxonomy]], + ]: + raise NotImplementedError() + + @property + def delete_taxonomy( + self, + ) -> Callable[ + [policytagmanager.DeleteTaxonomyRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def update_taxonomy( + self, + ) -> Callable[ + [policytagmanager.UpdateTaxonomyRequest], + Union[policytagmanager.Taxonomy, Awaitable[policytagmanager.Taxonomy]], + ]: + raise NotImplementedError() + + @property + def list_taxonomies( + self, + ) -> Callable[ + [policytagmanager.ListTaxonomiesRequest], + Union[ + policytagmanager.ListTaxonomiesResponse, + Awaitable[policytagmanager.ListTaxonomiesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_taxonomy( + self, + ) -> Callable[ + [policytagmanager.GetTaxonomyRequest], + Union[policytagmanager.Taxonomy, Awaitable[policytagmanager.Taxonomy]], + ]: + raise NotImplementedError() + + @property + def create_policy_tag( + self, + ) -> Callable[ + [policytagmanager.CreatePolicyTagRequest], + Union[policytagmanager.PolicyTag, Awaitable[policytagmanager.PolicyTag]], + ]: + raise NotImplementedError() + + @property + def delete_policy_tag( + self, + ) -> Callable[ + [policytagmanager.DeletePolicyTagRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def update_policy_tag( + self, + ) -> Callable[ + [policytagmanager.UpdatePolicyTagRequest], + Union[policytagmanager.PolicyTag, Awaitable[policytagmanager.PolicyTag]], + ]: + raise NotImplementedError() + + @property + def list_policy_tags( + self, + ) -> Callable[ + [policytagmanager.ListPolicyTagsRequest], + Union[ + policytagmanager.ListPolicyTagsResponse, + Awaitable[policytagmanager.ListPolicyTagsResponse], + ], + ]: + raise 
NotImplementedError() + + @property + def get_policy_tag( + self, + ) -> Callable[ + [policytagmanager.GetPolicyTagRequest], + Union[policytagmanager.PolicyTag, Awaitable[policytagmanager.PolicyTag]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("PolicyTagManagerTransport",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py new file mode 100644 index 00000000..fa3dd7c6 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py @@ -0,0 +1,600 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO + + +class PolicyTagManagerGrpcTransport(PolicyTagManagerTransport): + """gRPC backend transport for PolicyTagManager. + + Policy Tag Manager API service allows clients to manage their + policy tags and taxonomies. + + Policy tags are used to tag BigQuery columns and apply + additional access control policies. A taxonomy is a hierarchical + grouping of policy tags that classify data along a common axis. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "datacatalog.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "datacatalog.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_taxonomy( + self, + ) -> Callable[[policytagmanager.CreateTaxonomyRequest], policytagmanager.Taxonomy]: + r"""Return a callable for the create taxonomy method over gRPC. + + Creates a taxonomy in a specified project. The + taxonomy is initially empty, i.e., does not contain + policy tags. + + Returns: + Callable[[~.CreateTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_taxonomy" not in self._stubs: + self._stubs["create_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/CreateTaxonomy", + request_serializer=policytagmanager.CreateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs["create_taxonomy"] + + @property + def delete_taxonomy( + self, + ) -> Callable[[policytagmanager.DeleteTaxonomyRequest], empty_pb2.Empty]: + r"""Return a callable for the delete taxonomy method over gRPC. + + Deletes a taxonomy. This method will also delete all + policy tags in this taxonomy, their associated policies, + and the policy tags references from BigQuery columns. + + Returns: + Callable[[~.DeleteTaxonomyRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_taxonomy" not in self._stubs: + self._stubs["delete_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/DeleteTaxonomy", + request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_taxonomy"] + + @property + def update_taxonomy( + self, + ) -> Callable[[policytagmanager.UpdateTaxonomyRequest], policytagmanager.Taxonomy]: + r"""Return a callable for the update taxonomy method over gRPC. + + Updates a taxonomy. This method can update the + taxonomy's display name, description, and activated + policy types. + + Returns: + Callable[[~.UpdateTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_taxonomy" not in self._stubs: + self._stubs["update_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/UpdateTaxonomy", + request_serializer=policytagmanager.UpdateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs["update_taxonomy"] + + @property + def list_taxonomies( + self, + ) -> Callable[ + [policytagmanager.ListTaxonomiesRequest], + policytagmanager.ListTaxonomiesResponse, + ]: + r"""Return a callable for the list taxonomies method over gRPC. + + Lists all taxonomies in a project in a particular + location that the caller has permission to view. + + Returns: + Callable[[~.ListTaxonomiesRequest], + ~.ListTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_taxonomies" not in self._stubs: + self._stubs["list_taxonomies"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/ListTaxonomies", + request_serializer=policytagmanager.ListTaxonomiesRequest.serialize, + response_deserializer=policytagmanager.ListTaxonomiesResponse.deserialize, + ) + return self._stubs["list_taxonomies"] + + @property + def get_taxonomy( + self, + ) -> Callable[[policytagmanager.GetTaxonomyRequest], policytagmanager.Taxonomy]: + r"""Return a callable for the get taxonomy method over gRPC. + + Gets a taxonomy. + + Returns: + Callable[[~.GetTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_taxonomy" not in self._stubs: + self._stubs["get_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/GetTaxonomy", + request_serializer=policytagmanager.GetTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs["get_taxonomy"] + + @property + def create_policy_tag( + self, + ) -> Callable[ + [policytagmanager.CreatePolicyTagRequest], policytagmanager.PolicyTag + ]: + r"""Return a callable for the create policy tag method over gRPC. + + Creates a policy tag in a taxonomy. + + Returns: + Callable[[~.CreatePolicyTagRequest], + ~.PolicyTag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_policy_tag" not in self._stubs: + self._stubs["create_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/CreatePolicyTag", + request_serializer=policytagmanager.CreatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs["create_policy_tag"] + + @property + def delete_policy_tag( + self, + ) -> Callable[[policytagmanager.DeletePolicyTagRequest], empty_pb2.Empty]: + r"""Return a callable for the delete policy tag method over gRPC. + + Deletes a policy tag. This method also deletes: + + - all of its descendant policy tags, if any + - the policies associated with the policy tag and its + descendants + - references from BigQuery table schema of the policy tag and + its descendants. + + Returns: + Callable[[~.DeletePolicyTagRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_policy_tag" not in self._stubs: + self._stubs["delete_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/DeletePolicyTag", + request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_policy_tag"] + + @property + def update_policy_tag( + self, + ) -> Callable[ + [policytagmanager.UpdatePolicyTagRequest], policytagmanager.PolicyTag + ]: + r"""Return a callable for the update policy tag method over gRPC. + + Updates a policy tag. This method can update the + policy tag's display name, description, and parent + policy tag. + + Returns: + Callable[[~.UpdatePolicyTagRequest], + ~.PolicyTag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_policy_tag" not in self._stubs: + self._stubs["update_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/UpdatePolicyTag", + request_serializer=policytagmanager.UpdatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs["update_policy_tag"] + + @property + def list_policy_tags( + self, + ) -> Callable[ + [policytagmanager.ListPolicyTagsRequest], + policytagmanager.ListPolicyTagsResponse, + ]: + r"""Return a callable for the list policy tags method over gRPC. + + Lists all policy tags in a taxonomy. 
+ + Returns: + Callable[[~.ListPolicyTagsRequest], + ~.ListPolicyTagsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_policy_tags" not in self._stubs: + self._stubs["list_policy_tags"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/ListPolicyTags", + request_serializer=policytagmanager.ListPolicyTagsRequest.serialize, + response_deserializer=policytagmanager.ListPolicyTagsResponse.deserialize, + ) + return self._stubs["list_policy_tags"] + + @property + def get_policy_tag( + self, + ) -> Callable[[policytagmanager.GetPolicyTagRequest], policytagmanager.PolicyTag]: + r"""Return a callable for the get policy tag method over gRPC. + + Gets a policy tag. + + Returns: + Callable[[~.GetPolicyTagRequest], + ~.PolicyTag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_policy_tag" not in self._stubs: + self._stubs["get_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/GetPolicyTag", + request_serializer=policytagmanager.GetPolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs["get_policy_tag"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the IAM policy for a policy tag or a taxonomy. 
+ + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the IAM policy for a policy tag or a taxonomy. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns the permissions that a caller has on a + specified policy tag or taxonomy. 
+ + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("PolicyTagManagerGrpcTransport",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py new file mode 100644 index 00000000..ad559282 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py @@ -0,0 +1,613 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO +from .grpc import PolicyTagManagerGrpcTransport + + +class PolicyTagManagerGrpcAsyncIOTransport(PolicyTagManagerTransport): + """gRPC AsyncIO backend transport for PolicyTagManager. + + Policy Tag Manager API service allows clients to manage their + policy tags and taxonomies. + + Policy tags are used to tag BigQuery columns and apply + additional access control policies. A taxonomy is a hierarchical + grouping of policy tags that classify data along a common axis. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "datacatalog.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "datacatalog.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_taxonomy( + self, + ) -> Callable[ + [policytagmanager.CreateTaxonomyRequest], Awaitable[policytagmanager.Taxonomy] + ]: + r"""Return a callable for the create taxonomy method over gRPC. + + Creates a taxonomy in a specified project. The + taxonomy is initially empty, i.e., does not contain + policy tags. + + Returns: + Callable[[~.CreateTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_taxonomy" not in self._stubs: + self._stubs["create_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/CreateTaxonomy", + request_serializer=policytagmanager.CreateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs["create_taxonomy"] + + @property + def delete_taxonomy( + self, + ) -> Callable[[policytagmanager.DeleteTaxonomyRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete taxonomy method over gRPC. + + Deletes a taxonomy. This method will also delete all + policy tags in this taxonomy, their associated policies, + and the policy tags references from BigQuery columns. + + Returns: + Callable[[~.DeleteTaxonomyRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_taxonomy" not in self._stubs: + self._stubs["delete_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/DeleteTaxonomy", + request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_taxonomy"] + + @property + def update_taxonomy( + self, + ) -> Callable[ + [policytagmanager.UpdateTaxonomyRequest], Awaitable[policytagmanager.Taxonomy] + ]: + r"""Return a callable for the update taxonomy method over gRPC. + + Updates a taxonomy. This method can update the + taxonomy's display name, description, and activated + policy types. 
+ + Returns: + Callable[[~.UpdateTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_taxonomy" not in self._stubs: + self._stubs["update_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/UpdateTaxonomy", + request_serializer=policytagmanager.UpdateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs["update_taxonomy"] + + @property + def list_taxonomies( + self, + ) -> Callable[ + [policytagmanager.ListTaxonomiesRequest], + Awaitable[policytagmanager.ListTaxonomiesResponse], + ]: + r"""Return a callable for the list taxonomies method over gRPC. + + Lists all taxonomies in a project in a particular + location that the caller has permission to view. + + Returns: + Callable[[~.ListTaxonomiesRequest], + Awaitable[~.ListTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_taxonomies" not in self._stubs: + self._stubs["list_taxonomies"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/ListTaxonomies", + request_serializer=policytagmanager.ListTaxonomiesRequest.serialize, + response_deserializer=policytagmanager.ListTaxonomiesResponse.deserialize, + ) + return self._stubs["list_taxonomies"] + + @property + def get_taxonomy( + self, + ) -> Callable[ + [policytagmanager.GetTaxonomyRequest], Awaitable[policytagmanager.Taxonomy] + ]: + r"""Return a callable for the get taxonomy method over gRPC. + + Gets a taxonomy. 
+ + Returns: + Callable[[~.GetTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_taxonomy" not in self._stubs: + self._stubs["get_taxonomy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/GetTaxonomy", + request_serializer=policytagmanager.GetTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs["get_taxonomy"] + + @property + def create_policy_tag( + self, + ) -> Callable[ + [policytagmanager.CreatePolicyTagRequest], Awaitable[policytagmanager.PolicyTag] + ]: + r"""Return a callable for the create policy tag method over gRPC. + + Creates a policy tag in a taxonomy. + + Returns: + Callable[[~.CreatePolicyTagRequest], + Awaitable[~.PolicyTag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_policy_tag" not in self._stubs: + self._stubs["create_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/CreatePolicyTag", + request_serializer=policytagmanager.CreatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs["create_policy_tag"] + + @property + def delete_policy_tag( + self, + ) -> Callable[ + [policytagmanager.DeletePolicyTagRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete policy tag method over gRPC. + + Deletes a policy tag. 
This method also deletes: + + - all of its descendant policy tags, if any + - the policies associated with the policy tag and its + descendants + - references from BigQuery table schema of the policy tag and + its descendants. + + Returns: + Callable[[~.DeletePolicyTagRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_policy_tag" not in self._stubs: + self._stubs["delete_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/DeletePolicyTag", + request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_policy_tag"] + + @property + def update_policy_tag( + self, + ) -> Callable[ + [policytagmanager.UpdatePolicyTagRequest], Awaitable[policytagmanager.PolicyTag] + ]: + r"""Return a callable for the update policy tag method over gRPC. + + Updates a policy tag. This method can update the + policy tag's display name, description, and parent + policy tag. + + Returns: + Callable[[~.UpdatePolicyTagRequest], + Awaitable[~.PolicyTag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_policy_tag" not in self._stubs: + self._stubs["update_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/UpdatePolicyTag", + request_serializer=policytagmanager.UpdatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs["update_policy_tag"] + + @property + def list_policy_tags( + self, + ) -> Callable[ + [policytagmanager.ListPolicyTagsRequest], + Awaitable[policytagmanager.ListPolicyTagsResponse], + ]: + r"""Return a callable for the list policy tags method over gRPC. + + Lists all policy tags in a taxonomy. + + Returns: + Callable[[~.ListPolicyTagsRequest], + Awaitable[~.ListPolicyTagsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_policy_tags" not in self._stubs: + self._stubs["list_policy_tags"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/ListPolicyTags", + request_serializer=policytagmanager.ListPolicyTagsRequest.serialize, + response_deserializer=policytagmanager.ListPolicyTagsResponse.deserialize, + ) + return self._stubs["list_policy_tags"] + + @property + def get_policy_tag( + self, + ) -> Callable[ + [policytagmanager.GetPolicyTagRequest], Awaitable[policytagmanager.PolicyTag] + ]: + r"""Return a callable for the get policy tag method over gRPC. + + Gets a policy tag. + + Returns: + Callable[[~.GetPolicyTagRequest], + Awaitable[~.PolicyTag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_policy_tag" not in self._stubs: + self._stubs["get_policy_tag"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/GetPolicyTag", + request_serializer=policytagmanager.GetPolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs["get_policy_tag"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the IAM policy for a policy tag or a taxonomy. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the IAM policy for a policy tag or a taxonomy. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns the permissions that a caller has on a + specified policy tag or taxonomy. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManager/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("PolicyTagManagerGrpcAsyncIOTransport",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py new file mode 100644 index 00000000..51e547a2 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import PolicyTagManagerSerializationClient +from .async_client import PolicyTagManagerSerializationAsyncClient + +__all__ = ( + "PolicyTagManagerSerializationClient", + "PolicyTagManagerSerializationAsyncClient", +) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py new file mode 100644 index 00000000..99f190e8 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py @@ -0,0 +1,301 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import policytagmanagerserialization +from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport +from .client import PolicyTagManagerSerializationClient + + +class PolicyTagManagerSerializationAsyncClient: + """Policy Tag Manager serialization API service allows clients + to manipulate their policy tags and taxonomies in serialized + format, where taxonomy is a hierarchical group of policy tags. 
+ """ + + _client: PolicyTagManagerSerializationClient + + DEFAULT_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_MTLS_ENDPOINT + + taxonomy_path = staticmethod(PolicyTagManagerSerializationClient.taxonomy_path) + parse_taxonomy_path = staticmethod( + PolicyTagManagerSerializationClient.parse_taxonomy_path + ) + common_billing_account_path = staticmethod( + PolicyTagManagerSerializationClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + PolicyTagManagerSerializationClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + PolicyTagManagerSerializationClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_organization_path + ) + common_project_path = staticmethod( + PolicyTagManagerSerializationClient.common_project_path + ) + parse_common_project_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_project_path + ) + common_location_path = staticmethod( + PolicyTagManagerSerializationClient.common_location_path + ) + parse_common_location_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerSerializationAsyncClient: The constructed client. 
+ """ + return PolicyTagManagerSerializationClient.from_service_account_info.__func__(PolicyTagManagerSerializationAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerSerializationAsyncClient: The constructed client. + """ + return PolicyTagManagerSerializationClient.from_service_account_file.__func__(PolicyTagManagerSerializationAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PolicyTagManagerSerializationTransport: + """Returns the transport used by the client instance. + + Returns: + PolicyTagManagerSerializationTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(PolicyTagManagerSerializationClient).get_transport_class, + type(PolicyTagManagerSerializationClient), + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, PolicyTagManagerSerializationTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the policy tag manager serialization client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, ~.PolicyTagManagerSerializationTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PolicyTagManagerSerializationClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def import_taxonomies( + self, + request: policytagmanagerserialization.ImportTaxonomiesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ImportTaxonomiesResponse: + r"""Creates new taxonomies (including their policy tags) + by importing from inlined source or cross-regional + source. New taxonomies will be created in a given parent + project. 
+ + If using the cross-regional source, a new taxonomy is + created by copying from a source in another region. + + If using the inlined source, this method provides a way + to bulk create taxonomies and policy tags using nested + proto structure. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.ImportTaxonomiesRequest`): + The request object. Request message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.ImportTaxonomiesResponse: + Response message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + request = policytagmanagerserialization.ImportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_taxonomies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def export_taxonomies( + self, + request: policytagmanagerserialization.ExportTaxonomiesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ExportTaxonomiesResponse: + r"""Exports taxonomies as the requested type and returns + the taxonomies including their policy tags. The + requested taxonomies must belong to one project. + + SerializedTaxonomy protos with nested policy tags that + are generated by this method can be used as input for + future ImportTaxonomies calls. + + Args: + request (:class:`google.cloud.datacatalog_v1.types.ExportTaxonomiesRequest`): + The request object. Request message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.ExportTaxonomiesResponse: + Response message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + request = policytagmanagerserialization.ExportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_taxonomies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datacatalog", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("PolicyTagManagerSerializationAsyncClient",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py new file mode 100644 index 00000000..c0339ae5 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py @@ -0,0 +1,489 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import policytagmanagerserialization +from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import PolicyTagManagerSerializationGrpcTransport +from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport + + +class PolicyTagManagerSerializationClientMeta(type): + """Metaclass for the PolicyTagManagerSerialization client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[PolicyTagManagerSerializationTransport]] + _transport_registry["grpc"] = PolicyTagManagerSerializationGrpcTransport + _transport_registry[ + "grpc_asyncio" + ] = PolicyTagManagerSerializationGrpcAsyncIOTransport + + def get_transport_class( + cls, label: str = None, + ) -> Type[PolicyTagManagerSerializationTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PolicyTagManagerSerializationClient( + metaclass=PolicyTagManagerSerializationClientMeta +): + """Policy Tag Manager serialization API service allows clients + to manipulate their policy tags and taxonomies in serialized + format, where taxonomy is a hierarchical group of policy tags. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "datacatalog.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerSerializationClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerSerializationClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PolicyTagManagerSerializationTransport: + """Returns the transport used by the client instance. + + Returns: + PolicyTagManagerSerializationTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def taxonomy_path(project: str, location: str, taxonomy: str,) -> str: + """Returns a fully-qualified taxonomy string.""" + return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( + project=project, location=location, taxonomy=taxonomy, + ) + + @staticmethod + def parse_taxonomy_path(path: str) -> Dict[str, str]: + """Parses a taxonomy path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/taxonomies/(?P<taxonomy>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Returns a fully-qualified project string.""" + return 
"projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, PolicyTagManagerSerializationTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the policy tag manager serialization client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, PolicyTagManagerSerializationTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PolicyTagManagerSerializationTransport): + # transport is a PolicyTagManagerSerializationTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def import_taxonomies( + self, + request: policytagmanagerserialization.ImportTaxonomiesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ImportTaxonomiesResponse: + r"""Creates new taxonomies (including their policy tags) + by importing from inlined source or cross-regional + source. New taxonomies will be created in a given parent + project. + + If using the cross-regional source, a new taxonomy is + created by copying from a source in another region. + + If using the inlined source, this method provides a way + to bulk create taxonomies and policy tags using nested + proto structure. + + Args: + request (google.cloud.datacatalog_v1.types.ImportTaxonomiesRequest): + The request object. Request message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.ImportTaxonomiesResponse: + Response message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanagerserialization.ImportTaxonomiesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, policytagmanagerserialization.ImportTaxonomiesRequest + ): + request = policytagmanagerserialization.ImportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def export_taxonomies( + self, + request: policytagmanagerserialization.ExportTaxonomiesRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ExportTaxonomiesResponse: + r"""Exports taxonomies as the requested type and returns + the taxonomies including their policy tags. The + requested taxonomies must belong to one project. + + SerializedTaxonomy protos with nested policy tags that + are generated by this method can be used as input for + future ImportTaxonomies calls. + + Args: + request (google.cloud.datacatalog_v1.types.ExportTaxonomiesRequest): + The request object. Request message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.datacatalog_v1.types.ExportTaxonomiesResponse: + Response message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanagerserialization.ExportTaxonomiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, policytagmanagerserialization.ExportTaxonomiesRequest + ): + request = policytagmanagerserialization.ExportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datacatalog", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("PolicyTagManagerSerializationClient",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py new file mode 100644 index 00000000..a198169c --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PolicyTagManagerSerializationTransport +from .grpc import PolicyTagManagerSerializationGrpcTransport +from .grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport + + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[PolicyTagManagerSerializationTransport]] +_transport_registry["grpc"] = PolicyTagManagerSerializationGrpcTransport +_transport_registry["grpc_asyncio"] = PolicyTagManagerSerializationGrpcAsyncIOTransport + +__all__ = ( + "PolicyTagManagerSerializationTransport", + "PolicyTagManagerSerializationGrpcTransport", + "PolicyTagManagerSerializationGrpcAsyncIOTransport", +) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py new file mode 100644 index 00000000..fb883d4b --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanagerserialization + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datacatalog", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + + +class PolicyTagManagerSerializationTransport(abc.ABC): + """Abstract transport class for PolicyTagManagerSerialization.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "datacatalog.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. 
These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.import_taxonomies: gapic_v1.method.wrap_method( + self.import_taxonomies, default_timeout=None, client_info=client_info, + ), + self.export_taxonomies: gapic_v1.method.wrap_method( + self.export_taxonomies, default_timeout=None, client_info=client_info, + ), + } + + @property + def import_taxonomies( + self, + ) -> Callable[ + [policytagmanagerserialization.ImportTaxonomiesRequest], + Union[ + policytagmanagerserialization.ImportTaxonomiesResponse, + Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse], + ], + ]: + raise NotImplementedError() + + @property + def export_taxonomies( + self, + ) -> Callable[ + [policytagmanagerserialization.ExportTaxonomiesRequest], + Union[ + policytagmanagerserialization.ExportTaxonomiesResponse, + Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("PolicyTagManagerSerializationTransport",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py new file mode 100644 index 00000000..2c96fd41 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanagerserialization +from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO + + +class PolicyTagManagerSerializationGrpcTransport( + PolicyTagManagerSerializationTransport +): + """gRPC backend transport for PolicyTagManagerSerialization. + + Policy Tag Manager serialization API service allows clients + to manipulate their policy tags and taxonomies in serialized + format, where taxonomy is a hierarchical group of policy tags. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "datacatalog.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + 
("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "datacatalog.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. 
+ """ + return self._grpc_channel + + @property + def import_taxonomies( + self, + ) -> Callable[ + [policytagmanagerserialization.ImportTaxonomiesRequest], + policytagmanagerserialization.ImportTaxonomiesResponse, + ]: + r"""Return a callable for the import taxonomies method over gRPC. + + Creates new taxonomies (including their policy tags) + by importing from inlined source or cross-regional + source. New taxonomies will be created in a given parent + project. + + If using the cross-regional source, a new taxonomy is + created by copying from a source in another region. + + If using the inlined source, this method provides a way + to bulk create taxonomies and policy tags using nested + proto structure. + + Returns: + Callable[[~.ImportTaxonomiesRequest], + ~.ImportTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_taxonomies" not in self._stubs: + self._stubs["import_taxonomies"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ImportTaxonomies", + request_serializer=policytagmanagerserialization.ImportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ImportTaxonomiesResponse.deserialize, + ) + return self._stubs["import_taxonomies"] + + @property + def export_taxonomies( + self, + ) -> Callable[ + [policytagmanagerserialization.ExportTaxonomiesRequest], + policytagmanagerserialization.ExportTaxonomiesResponse, + ]: + r"""Return a callable for the export taxonomies method over gRPC. + + Exports taxonomies as the requested type and returns + the taxonomies including their policy tags. The + requested taxonomies must belong to one project. 
+ + SerializedTaxonomy protos with nested policy tags that + are generated by this method can be used as input for + future ImportTaxonomies calls. + + Returns: + Callable[[~.ExportTaxonomiesRequest], + ~.ExportTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_taxonomies" not in self._stubs: + self._stubs["export_taxonomies"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ExportTaxonomies", + request_serializer=policytagmanagerserialization.ExportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ExportTaxonomiesResponse.deserialize, + ) + return self._stubs["export_taxonomies"] + + +__all__ = ("PolicyTagManagerSerializationGrpcTransport",) diff --git a/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py new file mode 100644 index 00000000..6b20aaf2 --- /dev/null +++ b/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py @@ -0,0 +1,306 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanagerserialization +from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO +from .grpc import PolicyTagManagerSerializationGrpcTransport + + +class PolicyTagManagerSerializationGrpcAsyncIOTransport( + PolicyTagManagerSerializationTransport +): + """gRPC AsyncIO backend transport for PolicyTagManagerSerialization. + + Policy Tag Manager serialization API service allows clients + to manipulate their policy tags and taxonomies in serialized + format, where taxonomy is a hierarchical group of policy tags. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "datacatalog.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "datacatalog.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + 
("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def import_taxonomies( + self, + ) -> Callable[ + [policytagmanagerserialization.ImportTaxonomiesRequest], + Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse], + ]: + r"""Return a callable for the import taxonomies method over gRPC. + + Creates new taxonomies (including their policy tags) + by importing from inlined source or cross-regional + source. New taxonomies will be created in a given parent + project. + + If using the cross-regional source, a new taxonomy is + created by copying from a source in another region. + + If using the inlined source, this method provides a way + to bulk create taxonomies and policy tags using nested + proto structure. + + Returns: + Callable[[~.ImportTaxonomiesRequest], + Awaitable[~.ImportTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "import_taxonomies" not in self._stubs: + self._stubs["import_taxonomies"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ImportTaxonomies", + request_serializer=policytagmanagerserialization.ImportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ImportTaxonomiesResponse.deserialize, + ) + return self._stubs["import_taxonomies"] + + @property + def export_taxonomies( + self, + ) -> Callable[ + [policytagmanagerserialization.ExportTaxonomiesRequest], + Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse], + ]: + r"""Return a callable for the export taxonomies method over gRPC. + + Exports taxonomies as the requested type and returns + the taxonomies including their policy tags. The + requested taxonomies must belong to one project. + + SerializedTaxonomy protos with nested policy tags that + are generated by this method can be used as input for + future ImportTaxonomies calls. + + Returns: + Callable[[~.ExportTaxonomiesRequest], + Awaitable[~.ExportTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_taxonomies" not in self._stubs: + self._stubs["export_taxonomies"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ExportTaxonomies", + request_serializer=policytagmanagerserialization.ExportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ExportTaxonomiesResponse.deserialize, + ) + return self._stubs["export_taxonomies"] + + +__all__ = ("PolicyTagManagerSerializationGrpcAsyncIOTransport",) diff --git a/google/cloud/datacatalog_v1/types/__init__.py b/google/cloud/datacatalog_v1/types/__init__.py index fc60cdfe..ef3be71a 100644 --- a/google/cloud/datacatalog_v1/types/__init__.py +++ b/google/cloud/datacatalog_v1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. # - +from .data_source import DataSource from .datacatalog import ( CreateEntryGroupRequest, CreateEntryRequest, CreateTagRequest, CreateTagTemplateFieldRequest, CreateTagTemplateRequest, + DatabaseTableSpec, DeleteEntryGroupRequest, DeleteEntryRequest, DeleteTagRequest, @@ -38,6 +38,7 @@ ListTagsRequest, ListTagsResponse, LookupEntryRequest, + RenameTagTemplateFieldEnumValueRequest, RenameTagTemplateFieldRequest, SearchCatalogRequest, SearchCatalogResponse, @@ -52,6 +53,32 @@ GcsFilesetSpec, GcsFileSpec, ) +from .policytagmanager import ( + CreatePolicyTagRequest, + CreateTaxonomyRequest, + DeletePolicyTagRequest, + DeleteTaxonomyRequest, + GetPolicyTagRequest, + GetTaxonomyRequest, + ListPolicyTagsRequest, + ListPolicyTagsResponse, + ListTaxonomiesRequest, + ListTaxonomiesResponse, + PolicyTag, + Taxonomy, + UpdatePolicyTagRequest, + UpdateTaxonomyRequest, +) +from .policytagmanagerserialization import ( + CrossRegionalSource, + ExportTaxonomiesRequest, + ExportTaxonomiesResponse, + 
ImportTaxonomiesRequest, + ImportTaxonomiesResponse, + InlineSource, + SerializedPolicyTag, + SerializedTaxonomy, +) from .schema import ( ColumnSchema, Schema, @@ -78,11 +105,13 @@ __all__ = ( "IntegratedSystem", + "DataSource", "CreateEntryGroupRequest", "CreateEntryRequest", "CreateTagRequest", "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", + "DatabaseTableSpec", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeleteTagRequest", @@ -100,6 +129,7 @@ "ListTagsRequest", "ListTagsResponse", "LookupEntryRequest", + "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", "SearchCatalogRequest", "SearchCatalogResponse", @@ -111,6 +141,28 @@ "EntryType", "GcsFilesetSpec", "GcsFileSpec", + "CreatePolicyTagRequest", + "CreateTaxonomyRequest", + "DeletePolicyTagRequest", + "DeleteTaxonomyRequest", + "GetPolicyTagRequest", + "GetTaxonomyRequest", + "ListPolicyTagsRequest", + "ListPolicyTagsResponse", + "ListTaxonomiesRequest", + "ListTaxonomiesResponse", + "PolicyTag", + "Taxonomy", + "UpdatePolicyTagRequest", + "UpdateTaxonomyRequest", + "CrossRegionalSource", + "ExportTaxonomiesRequest", + "ExportTaxonomiesResponse", + "ImportTaxonomiesRequest", + "ImportTaxonomiesResponse", + "InlineSource", + "SerializedPolicyTag", + "SerializedTaxonomy", "ColumnSchema", "Schema", "SearchCatalogResult", diff --git a/google/cloud/datacatalog_v1/types/common.py b/google/cloud/datacatalog_v1/types/common.py index feace354..9bc6ad34 100644 --- a/google/cloud/datacatalog_v1/types/common.py +++ b/google/cloud/datacatalog_v1/types/common.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore @@ -30,6 +28,7 @@ class IntegratedSystem(proto.Enum): INTEGRATED_SYSTEM_UNSPECIFIED = 0 BIGQUERY = 1 CLOUD_PUBSUB = 2 + DATAPROC_METASTORE = 3 __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/data_source.py b/google/cloud/datacatalog_v1/types/data_source.py new file mode 100644 index 00000000..6ddc6fcc --- /dev/null +++ b/google/cloud/datacatalog_v1/types/data_source.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datacatalog.v1", manifest={"DataSource",}, +) + + +class DataSource(proto.Message): + r"""Describes the physical location of an entry. + Attributes: + service (google.cloud.datacatalog_v1.types.DataSource.Service): + Service in which the data is physically + stored. + resource (str): + Full name of the resource as defined by the service, e.g. 
+ //bigquery.googleapis.com/projects/{project_id}/locations/{location}/datasets/{dataset_id}/tables/{table_id} + """ + + class Service(proto.Enum): + r"""Service name where the data is stored.""" + SERVICE_UNSPECIFIED = 0 + CLOUD_STORAGE = 1 + BIGQUERY = 2 + + service = proto.Field(proto.ENUM, number=1, enum=Service,) + resource = proto.Field(proto.STRING, number=2,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/datacatalog.py b/google/cloud/datacatalog_v1/types/datacatalog.py index e90fb67a..2437039d 100644 --- a/google/cloud/datacatalog_v1/types/datacatalog.py +++ b/google/cloud/datacatalog_v1/types/datacatalog.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,18 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import data_source as gcd_data_source from google.cloud.datacatalog_v1.types import gcs_fileset_spec as gcd_gcs_fileset_spec from google.cloud.datacatalog_v1.types import schema as gcd_schema from google.cloud.datacatalog_v1.types import search from google.cloud.datacatalog_v1.types import table_spec from google.cloud.datacatalog_v1.types import tags as gcd_tags from google.cloud.datacatalog_v1.types import timestamps -from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore __protobuf__ = proto.module( @@ -46,6 +44,7 @@ "GetEntryRequest", "LookupEntryRequest", "Entry", + "DatabaseTableSpec", "EntryGroup", "CreateTagTemplateRequest", "GetTagTemplateRequest", @@ -57,6 +56,7 @@ "CreateTagTemplateFieldRequest", "UpdateTagTemplateFieldRequest", "RenameTagTemplateFieldRequest", + "RenameTagTemplateFieldEnumValueRequest", "DeleteTagTemplateFieldRequest", 
"ListTagsRequest", "ListTagsResponse", @@ -76,6 +76,8 @@ class EntryType(proto.Enum): MODEL = 5 DATA_STREAM = 3 FILESET = 4 + DATABASE = 7 + SERVICE = 14 class SearchCatalogRequest(proto.Message): @@ -89,8 +91,9 @@ class SearchCatalogRequest(proto.Message): false ``include_gcp_public_datasets`` is considered invalid. Data Catalog will return an error in such a case. query (str): - Required. The query string in search query syntax. The query - must be non-empty. + Optional. The query string in search query syntax. An empty + query string will result in all data assets (in the + specified scope) that the user has access to. Query strings can be simple as "x" or more qualified as: @@ -150,60 +153,27 @@ class Scope(proto.Message): Optional. The list of locations to search within. 1. If empty, search will be performed in all locations; - 2. If any of the locations are NOT in the valid locations - list, error will be returned; + 2. If any of the locations are NOT `supported + regions `__, + error will be returned; 3. Otherwise, search only the given locations for matching results. Typical usage is to leave this field empty. When a location is unreachable as returned in the ``SearchCatalogResponse.unreachable`` field, users can repeat the search request with this parameter set to get additional information on the error. 
- - Valid locations: - - - asia-east1 - - asia-east2 - - asia-northeast1 - - asia-northeast2 - - asia-northeast3 - - asia-south1 - - asia-southeast1 - - australia-southeast1 - - eu - - europe-north1 - - europe-west1 - - europe-west2 - - europe-west3 - - europe-west4 - - europe-west6 - - global - - northamerica-northeast1 - - southamerica-east1 - - us - - us-central1 - - us-east1 - - us-east4 - - us-west1 - - us-west2 """ - include_org_ids = proto.RepeatedField(proto.STRING, number=2) - - include_project_ids = proto.RepeatedField(proto.STRING, number=3) - - include_gcp_public_datasets = proto.Field(proto.BOOL, number=7) - - restricted_locations = proto.RepeatedField(proto.STRING, number=16) + include_org_ids = proto.RepeatedField(proto.STRING, number=2,) + include_project_ids = proto.RepeatedField(proto.STRING, number=3,) + include_gcp_public_datasets = proto.Field(proto.BOOL, number=7,) + restricted_locations = proto.RepeatedField(proto.STRING, number=16,) scope = proto.Field(proto.MESSAGE, number=6, message=Scope,) - - query = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - order_by = proto.Field(proto.STRING, number=5) + query = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + order_by = proto.Field(proto.STRING, number=5,) class SearchCatalogResponse(proto.Message): @@ -221,7 +191,7 @@ class SearchCatalogResponse(proto.Message): from those locations. Users can get additional information on the error by repeating the search request with a more restrictive parameter -- setting the value for - ``SearchDataCatalogRequest.scope.include_locations``. + ``SearchDataCatalogRequest.scope.restricted_locations``. 
""" @property @@ -231,10 +201,8 @@ def raw_page(self): results = proto.RepeatedField( proto.MESSAGE, number=1, message=search.SearchCatalogResult, ) - - next_page_token = proto.Field(proto.STRING, number=3) - - unreachable = proto.RepeatedField(proto.STRING, number=6) + next_page_token = proto.Field(proto.STRING, number=3,) + unreachable = proto.RepeatedField(proto.STRING, number=6,) class CreateEntryGroupRequest(proto.Message): @@ -243,28 +211,26 @@ class CreateEntryGroupRequest(proto.Message): Attributes: parent (str): - Required. The name of the project this entry group is in. - Example: + Required. The name of the project this entry group belongs + to. Example: - - projects/{project_id}/locations/{location} + ``projects/{project_id}/locations/{location}`` - Note that this EntryGroup and its child resources may not - actually be stored in the location in this name. + Note: The entry group itself and its child resources might + not be stored in the location specified in its name. entry_group_id (str): - Required. The id of the entry group to - create. The id must begin with a letter or - underscore, contain only English letters, - numbers and underscores, and be at most 64 - characters. + Required. The ID of the entry group to create. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and must start with a letter or underscore. + The maximum size is 64 bytes when encoded in UTF-8. entry_group (google.cloud.datacatalog_v1.types.EntryGroup): The entry group to create. Defaults to an empty entry group. """ - parent = proto.Field(proto.STRING, number=1) - - entry_group_id = proto.Field(proto.STRING, number=3) - + parent = proto.Field(proto.STRING, number=1,) + entry_group_id = proto.Field(proto.STRING, number=3,) entry_group = proto.Field(proto.MESSAGE, number=2, message="EntryGroup",) @@ -277,14 +243,18 @@ class UpdateEntryGroupRequest(proto.Message): Required. The updated entry group. "name" field must be set. 
update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update on the entry group. If - absent or empty, all modifiable fields are - updated. + Names of fields whose values to overwrite on + an entry group. + If this parameter is absent or empty, all + modifiable fields are overwritten. If such + fields are non-required and omitted in the + request body, their values are emptied. """ entry_group = proto.Field(proto.MESSAGE, number=1, message="EntryGroup",) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class GetEntryGroupRequest(proto.Message): @@ -300,9 +270,8 @@ class GetEntryGroupRequest(proto.Message): all fields are returned. """ - name = proto.Field(proto.STRING, number=1) - - read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + name = proto.Field(proto.STRING, number=1,) + read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,) class DeleteEntryGroupRequest(proto.Message): @@ -318,9 +287,8 @@ class DeleteEntryGroupRequest(proto.Message): entry group. """ - name = proto.Field(proto.STRING, number=1) - - force = proto.Field(proto.BOOL, number=2) + name = proto.Field(proto.STRING, number=1,) + force = proto.Field(proto.BOOL, number=2,) class ListEntryGroupsRequest(proto.Message): @@ -342,11 +310,9 @@ class ListEntryGroupsRequest(proto.Message): requested. If empty, the first page is returned. 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListEntryGroupsResponse(proto.Message): @@ -367,8 +333,7 @@ def raw_page(self): return self entry_groups = proto.RepeatedField(proto.MESSAGE, number=1, message="EntryGroup",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CreateEntryRequest(proto.Message): @@ -377,23 +342,25 @@ class CreateEntryRequest(proto.Message): Attributes: parent (str): - Required. The name of the entry group this entry is in. + Required. The name of the entry group this entry belongs to. Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}`` - Note that this Entry and its child resources may not - actually be stored in the location in this name. + Note: The entry itself and its child resources might not be + stored in the location specified in its name. entry_id (str): - Required. The id of the entry to create. + Required. The ID of the entry to create. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + and underscores (_). The maximum size is 64 bytes when + encoded in UTF-8. entry (google.cloud.datacatalog_v1.types.Entry): Required. The entry to create. """ - parent = proto.Field(proto.STRING, number=1) - - entry_id = proto.Field(proto.STRING, number=3) - + parent = proto.Field(proto.STRING, number=1,) + entry_id = proto.Field(proto.STRING, number=3,) entry = proto.Field(proto.MESSAGE, number=2, message="Entry",) @@ -406,8 +373,11 @@ class UpdateEntryRequest(proto.Message): Required. The updated entry. The "name" field must be set. 
update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update on the entry. If absent or empty, all - modifiable fields are updated. + Names of fields whose values to overwrite on an entry. + + If this parameter is absent or empty, all modifiable fields + are overwritten. If such fields are non-required and omitted + in the request body, their values are emptied. The following fields are modifiable: @@ -415,7 +385,7 @@ class UpdateEntryRequest(proto.Message): - ``schema`` - - For entries with type ``FILESET`` + - For entries with type ``FILESET``: - ``schema`` - ``display_name`` @@ -423,20 +393,21 @@ class UpdateEntryRequest(proto.Message): - ``gcs_fileset_spec`` - ``gcs_fileset_spec.file_patterns`` - - For entries with ``user_specified_type`` + - For entries with ``user_specified_type``: - ``schema`` - ``display_name`` - ``description`` - - user_specified_type - - user_specified_system - - linked_resource - - source_system_timestamps + - ``user_specified_type`` + - ``user_specified_system`` + - ``linked_resource`` + - ``source_system_timestamps`` """ entry = proto.Field(proto.MESSAGE, number=1, message="Entry",) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class DeleteEntryRequest(proto.Message): @@ -450,7 +421,7 @@ class DeleteEntryRequest(proto.Message): - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class GetEntryRequest(proto.Message): @@ -464,7 +435,7 @@ class GetEntryRequest(proto.Message): - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class LookupEntryRequest(proto.Message): @@ -493,14 +464,30 @@ class 
LookupEntryRequest(proto.Message): - ``bigquery.dataset.project_id.dataset_id`` - ``datacatalog.entry.project_id.location_id.entry_group_id.entry_id`` - ``*_id``\ s shoud satisfy the standard SQL rules for + ``*_id``\ s should satisfy the standard SQL rules for identifiers. https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical. - """ + fully_qualified_name (str): + Fully qualified name (FQN) of the resource. - linked_resource = proto.Field(proto.STRING, number=1, oneof="target_name") + FQNs take two forms: - sql_resource = proto.Field(proto.STRING, number=3, oneof="target_name") + - For non-regionalized resources: + + ``{SYSTEM}:{PROJECT}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` + + - For regionalized resources: + + ``{SYSTEM}:{PROJECT}.{LOCATION_ID}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` + + Example for a DPMS table: + + ``dataproc_metastore:project_id.location_id.instance_id.database_id.table_id`` + """ + + linked_resource = proto.Field(proto.STRING, number=1, oneof="target_name",) + sql_resource = proto.Field(proto.STRING, number=3, oneof="target_name",) + fully_qualified_name = proto.Field(proto.STRING, number=5, oneof="target_name",) class Entry(proto.Message): @@ -516,13 +503,13 @@ class Entry(proto.Message): Attributes: name (str): - The Data Catalog resource name of the entry in URL format. + Output only. The resource name of an entry in URL format. Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}`` - Note that this Entry and its child resources may not - actually be stored in the location in this name. + Note: The entry itself and its child resources might not be + stored in the location specified in its name. linked_resource (str): The resource this metadata entry refers to. 
@@ -532,11 +519,38 @@ class Entry(proto.Message): For example, the ``linked_resource`` for a table resource from BigQuery is: - - //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId + ``//bigquery.googleapis.com/projects/{projectId}/datasets/{datasetId}/tables/{tableId}`` + + Output only when entry is one of the types in the + ``EntryType`` enum. + + For entries with a ``user_specified_type``, this field is + optional and defaults to an empty string. - Output only when Entry is of type in the EntryType enum. For - entries with user_specified_type, this field is optional and - defaults to an empty string. + The resource string must contain only letters (a-z, A-Z), + numbers (0-9), underscores (_), periods (.), colons (:), + slashes (/), dashes (-), and hashes (#). The maximum size is + 200 bytes when encoded in UTF-8. + fully_qualified_name (str): + Fully qualified name (FQN) of the resource. Set + automatically for entries representing resources from synced + systems. Settable only during creation and read-only + afterwards. Can be used for search and lookup of the + entries. + + FQNs take two forms: + + - For non-regionalized resources: + + ``{SYSTEM}:{PROJECT}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` + + - For regionalized resources: + + ``{SYSTEM}:{PROJECT}.{LOCATION_ID}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` + + Example for a DPMS table: + + ``dataproc_metastore:project_id.location_id.instance_id.database_id.table_id`` type_ (google.cloud.datacatalog_v1.types.EntryType): The type of the entry. Only used for Entries with types in the @@ -577,16 +591,26 @@ class Entry(proto.Message): Specification for a group of BigQuery tables with name pattern ``[prefix]YYYYMMDD``. Context: https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding. + database_table_spec (google.cloud.datacatalog_v1.types.DatabaseTableSpec): + Specification that applies to a table resource. 
Only valid + for entries of ``TABLE`` type. display_name (str): - Display information such as title and - description. A short name to identify the entry, - for example, "Analytics Data - Jan 2011". - Default value is an empty string. + Display name of an entry. + + The name must contain only Unicode letters, numbers (0-9), + underscores (_), dashes (-), spaces ( ), and can't start or + end with spaces. The maximum size is 200 bytes when encoded + in UTF-8. Default value is an empty string. description (str): - Entry description, which can consist of - several sentences or paragraphs that describe - entry contents. Default value is an empty - string. + Entry description that can consist of several + sentences or paragraphs that describe entry + contents. + The description must not contain Unicode non- + characters as well as C0 and C1 control codes + except tabs (HT), new lines (LF), carriage + returns (CR), and page breaks (FF). + The maximum size is 2000 bytes when encoded in + UTF-8. Default value is an empty string. schema (google.cloud.datacatalog_v1.types.Schema): Schema of the entry. An entry might not have any schema attached to it. @@ -595,52 +619,67 @@ class Entry(proto.Message): Data Catalog entry. Output only when Entry is of type in the EntryType enum. For entries with user_specified_type, this field is optional and defaults to an empty timestamp. + data_source (google.cloud.datacatalog_v1.types.DataSource): + Output only. Physical location of the entry. 
""" - name = proto.Field(proto.STRING, number=1) - - linked_resource = proto.Field(proto.STRING, number=9) - + name = proto.Field(proto.STRING, number=1,) + linked_resource = proto.Field(proto.STRING, number=9,) + fully_qualified_name = proto.Field(proto.STRING, number=29,) type_ = proto.Field(proto.ENUM, number=2, oneof="entry_type", enum="EntryType",) - - user_specified_type = proto.Field(proto.STRING, number=16, oneof="entry_type") - + user_specified_type = proto.Field(proto.STRING, number=16, oneof="entry_type",) integrated_system = proto.Field( proto.ENUM, number=17, oneof="system", enum=common.IntegratedSystem, ) - - user_specified_system = proto.Field(proto.STRING, number=18, oneof="system") - + user_specified_system = proto.Field(proto.STRING, number=18, oneof="system",) gcs_fileset_spec = proto.Field( proto.MESSAGE, number=6, oneof="type_spec", message=gcd_gcs_fileset_spec.GcsFilesetSpec, ) - bigquery_table_spec = proto.Field( proto.MESSAGE, number=12, oneof="type_spec", message=table_spec.BigQueryTableSpec, ) - bigquery_date_sharded_spec = proto.Field( proto.MESSAGE, number=15, oneof="type_spec", message=table_spec.BigQueryDateShardedSpec, ) - - display_name = proto.Field(proto.STRING, number=3) - - description = proto.Field(proto.STRING, number=4) - + database_table_spec = proto.Field( + proto.MESSAGE, number=24, oneof="spec", message="DatabaseTableSpec", + ) + display_name = proto.Field(proto.STRING, number=3,) + description = proto.Field(proto.STRING, number=4,) schema = proto.Field(proto.MESSAGE, number=5, message=gcd_schema.Schema,) - source_system_timestamps = proto.Field( proto.MESSAGE, number=7, message=timestamps.SystemTimestamps, ) + data_source = proto.Field( + proto.MESSAGE, number=20, message=gcd_data_source.DataSource, + ) + + +class DatabaseTableSpec(proto.Message): + r"""Specification that applies to a table resource. Only valid for + entries of ``TABLE`` type. 
+ + Attributes: + type_ (google.cloud.datacatalog_v1.types.DatabaseTableSpec.TableType): + Type of this table. + """ + + class TableType(proto.Enum): + r"""Type of the table.""" + TABLE_TYPE_UNSPECIFIED = 0 + NATIVE = 1 + EXTERNAL = 2 + + type_ = proto.Field(proto.ENUM, number=1, enum=TableType,) class EntryGroup(proto.Message): @@ -652,10 +691,10 @@ class EntryGroup(proto.Message): name (str): The resource name of the entry group in URL format. Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}`` - Note that this EntryGroup and its child resources may not - actually be stored in the location in this name. + Note: The entry group itself and its child resources might + not be stored in the location specified in its name. display_name (str): A short name to identify the entry group, for example, "analytics data - jan 2011". Default @@ -670,12 +709,9 @@ class EntryGroup(proto.Message): EntryGroup. Default value is empty timestamps. """ - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - description = proto.Field(proto.STRING, number=3) - + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) data_catalog_timestamps = proto.Field( proto.MESSAGE, number=4, message=timestamps.SystemTimestamps, ) @@ -694,16 +730,18 @@ class CreateTagTemplateRequest(proto.Message): - projects/{project_id}/locations/us-central1 tag_template_id (str): - Required. The id of the tag template to - create. + Required. The ID of the tag template to create. + + The ID must contain only lowercase letters (a-z), numbers + (0-9), or underscores (_), and must start with a letter or + underscore. The maximum size is 64 bytes when encoded in + UTF-8. tag_template (google.cloud.datacatalog_v1.types.TagTemplate): Required. The tag template to create. 
""" - parent = proto.Field(proto.STRING, number=1) - - tag_template_id = proto.Field(proto.STRING, number=3) - + parent = proto.Field(proto.STRING, number=1,) + tag_template_id = proto.Field(proto.STRING, number=3,) tag_template = proto.Field(proto.MESSAGE, number=2, message=gcd_tags.TagTemplate,) @@ -718,7 +756,7 @@ class GetTagTemplateRequest(proto.Message): - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpdateTagTemplateRequest(proto.Message): @@ -730,20 +768,19 @@ class UpdateTagTemplateRequest(proto.Message): Required. The template to update. The "name" field must be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The field mask specifies the parts of the template to - overwrite. + Names of fields whose values to overwrite on a tag template. + Currently, only ``display_name`` can be overwritten. - Allowed fields: - - - ``display_name`` - - If absent or empty, all of the allowed fields above will be - updated. + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their values + are emptied. """ tag_template = proto.Field(proto.MESSAGE, number=1, message=gcd_tags.TagTemplate,) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class DeleteTagTemplateRequest(proto.Message): @@ -762,9 +799,8 @@ class DeleteTagTemplateRequest(proto.Message): the future. """ - name = proto.Field(proto.STRING, number=1) - - force = proto.Field(proto.BOOL, number=2) + name = proto.Field(proto.STRING, number=1,) + force = proto.Field(proto.BOOL, number=2,) class CreateTagRequest(proto.Message): @@ -774,18 +810,18 @@ class CreateTagRequest(proto.Message): Attributes: parent (str): Required. 
The name of the resource to attach this tag to. - Tags can be attached to Entries. Example: + Tags can be attached to entries. An entry can have up to + 1000 attached tags. Example: - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}`` - Note that this Tag and its child resources may not actually - be stored in the location in this name. + Note: The tag and its child resources might not be stored in + the location specified in its name. tag (google.cloud.datacatalog_v1.types.Tag): Required. The tag to create. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) tag = proto.Field(proto.MESSAGE, number=2, message=gcd_tags.Tag,) @@ -798,14 +834,20 @@ class UpdateTagRequest(proto.Message): Required. The updated tag. The "name" field must be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to update on the Tag. If absent or empty, all - modifiable fields are updated. Currently the only modifiable - field is the field ``fields``. + Names of fields whose values to overwrite on a tag. + Currently, a tag has the only modifiable field with the name + ``fields``. + + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their values + are emptied. 
""" tag = proto.Field(proto.MESSAGE, number=1, message=gcd_tags.Tag,) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class DeleteTagRequest(proto.Message): @@ -819,7 +861,7 @@ class DeleteTagRequest(proto.Message): - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateTagTemplateFieldRequest(proto.Message): @@ -835,19 +877,22 @@ class CreateTagTemplateFieldRequest(proto.Message): - projects/{project_id}/locations/us-central1/tagTemplates/{tag_template_id} tag_template_field_id (str): - Required. The ID of the tag template field to create. Field - ids can contain letters (both uppercase and lowercase), - numbers (0-9), underscores (_) and dashes (-). Field IDs - must be at least 1 character long and at most 128 characters - long. Field IDs must also be unique within their template. + Required. The ID of the tag template field to create. + + Note: Adding a required field to an existing template is + *not* allowed. + + Field IDs can contain letters (both uppercase and + lowercase), numbers (0-9), underscores (_) and dashes (-). + Field IDs must be at least 1 character long and at most 128 + characters long. Field IDs must also be unique within their + template. tag_template_field (google.cloud.datacatalog_v1.types.TagTemplateField): Required. The tag template field to create. 
""" - parent = proto.Field(proto.STRING, number=1) - - tag_template_field_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + tag_template_field_id = proto.Field(proto.STRING, number=2,) tag_template_field = proto.Field( proto.MESSAGE, number=3, message=gcd_tags.TagTemplateField, ) @@ -865,30 +910,33 @@ class UpdateTagTemplateFieldRequest(proto.Message): tag_template_field (google.cloud.datacatalog_v1.types.TagTemplateField): Required. The template to update. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. The field mask specifies the parts of the template - to be updated. Allowed fields: + Optional. Names of fields whose values to overwrite on an + individual field of a tag template. The following fields are + modifiable: - ``display_name`` - ``type.enum_type`` - ``is_required`` - If ``update_mask`` is not set or empty, all of the allowed - fields above will be updated. + If this parameter is absent or empty, all modifiable fields + are overwritten. If such fields are non-required and omitted + in the request body, their values are emptied with one + exception: when updating an enum type, the provided values + are merged with the existing values. Therefore, enum values + can only be added, existing enum values cannot be deleted or + renamed. - When updating an enum type, the provided values will be - merged with the existing values. Therefore, enum values can - only be added, existing enum values cannot be deleted nor - renamed. Updating a template field from optional to required - is NOT allowed. + Additionally, updating a template field from optional to + required is *not* allowed. 
""" - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) tag_template_field = proto.Field( proto.MESSAGE, number=2, message=gcd_tags.TagTemplateField, ) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) class RenameTagTemplateFieldRequest(proto.Message): @@ -905,9 +953,26 @@ class RenameTagTemplateFieldRequest(proto.Message): example, ``my_new_field``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) + new_tag_template_field_id = proto.Field(proto.STRING, number=2,) + + +class RenameTagTemplateFieldEnumValueRequest(proto.Message): + r"""Request message for + [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. - new_tag_template_field_id = proto.Field(proto.STRING, number=2) + Attributes: + name (str): + Required. The name of the enum field value. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} + new_enum_value_display_name (str): + Required. The new display name of the enum value. For + example, ``my_new_enum_value``. + """ + + name = proto.Field(proto.STRING, number=1,) + new_enum_value_display_name = proto.Field(proto.STRING, number=2,) class DeleteTagTemplateFieldRequest(proto.Message): @@ -927,9 +992,8 @@ class DeleteTagTemplateFieldRequest(proto.Message): in the future. """ - name = proto.Field(proto.STRING, number=1) - - force = proto.Field(proto.BOOL, number=2) + name = proto.Field(proto.STRING, number=1,) + force = proto.Field(proto.BOOL, number=2,) class ListTagsRequest(proto.Message): @@ -955,11 +1019,9 @@ class ListTagsRequest(proto.Message): If empty, the first page is returned. 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListTagsResponse(proto.Message): @@ -980,8 +1042,7 @@ def raw_page(self): return self tags = proto.RepeatedField(proto.MESSAGE, number=1, message=gcd_tags.Tag,) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class ListEntriesRequest(proto.Message): @@ -1008,13 +1069,10 @@ class ListEntriesRequest(proto.Message): return a list of Entries with only "name" field. """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - read_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + read_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask,) class ListEntriesResponse(proto.Message): @@ -1035,8 +1093,7 @@ def raw_page(self): return self entries = proto.RepeatedField(proto.MESSAGE, number=1, message="Entry",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/gcs_fileset_spec.py b/google/cloud/datacatalog_v1/types/gcs_fileset_spec.py index ce3a5a6e..37b54c53 100644 --- a/google/cloud/datacatalog_v1/types/gcs_fileset_spec.py +++ b/google/cloud/datacatalog_v1/types/gcs_fileset_spec.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,10 +13,8 @@ # See the License 
for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.datacatalog_v1.types import timestamps @@ -28,7 +25,6 @@ class GcsFilesetSpec(proto.Message): r"""Describes a Cloud Storage fileset entry. - Attributes: file_patterns (Sequence[str]): Required. Patterns to identify a set of files in Google @@ -69,8 +65,7 @@ class GcsFilesetSpec(proto.Message): are represented here. """ - file_patterns = proto.RepeatedField(proto.STRING, number=1) - + file_patterns = proto.RepeatedField(proto.STRING, number=1,) sample_gcs_file_specs = proto.RepeatedField( proto.MESSAGE, number=2, message="GcsFileSpec", ) @@ -78,7 +73,6 @@ class GcsFilesetSpec(proto.Message): class GcsFileSpec(proto.Message): r"""Specifications of a single file in Cloud Storage. - Attributes: file_path (str): Required. The full file path. Example: @@ -90,13 +84,11 @@ class GcsFileSpec(proto.Message): Output only. The size of the file, in bytes. """ - file_path = proto.Field(proto.STRING, number=1) - + file_path = proto.Field(proto.STRING, number=1,) gcs_timestamps = proto.Field( proto.MESSAGE, number=2, message=timestamps.SystemTimestamps, ) - - size_bytes = proto.Field(proto.INT64, number=4) + size_bytes = proto.Field(proto.INT64, number=4,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/policytagmanager.py b/google/cloud/datacatalog_v1/types/policytagmanager.py new file mode 100644 index 00000000..1aa3cba9 --- /dev/null +++ b/google/cloud/datacatalog_v1/types/policytagmanager.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.datacatalog_v1.types import timestamps +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datacatalog.v1", + manifest={ + "Taxonomy", + "PolicyTag", + "CreateTaxonomyRequest", + "DeleteTaxonomyRequest", + "UpdateTaxonomyRequest", + "ListTaxonomiesRequest", + "ListTaxonomiesResponse", + "GetTaxonomyRequest", + "CreatePolicyTagRequest", + "DeletePolicyTagRequest", + "UpdatePolicyTagRequest", + "ListPolicyTagsRequest", + "ListPolicyTagsResponse", + "GetPolicyTagRequest", + }, +) + + +class Taxonomy(proto.Message): + r"""A taxonomy is a collection of hierarchical policy tags that classify + data along a common axis. For instance a "data sensitivity" taxonomy + could contain the following policy tags: + + :: + + + PII + + Account number + + Age + + SSN + + Zipcode + + Financials + + Revenue + + A "data origin" taxonomy could contain the following policy tags: + + :: + + + User data + + Employee data + + Partner data + + Public data + + Attributes: + name (str): + Output only. Resource name of this taxonomy in format: + "projects/{project_number}/locations/{location_id}/taxonomies/{taxonomy_id}". + Note that taxonomy_id's are unique and generated by Policy + Tag Manager. + display_name (str): + Required. User-defined name of this taxonomy. 
+ It must: contain only unicode letters, numbers, + underscores, dashes and spaces; not start or end + with spaces; and be at most 200 bytes long when + encoded in UTF-8. + description (str): + Optional. Description of this taxonomy. It + must: contain only unicode characters, tabs, + newlines, carriage returns and page breaks; and + be at most 2000 bytes long when encoded in + UTF-8. If not set, defaults to an empty + description. + policy_tag_count (int): + Output only. Number of policy tags contained + in this taxonomy. + taxonomy_timestamps (google.cloud.datacatalog_v1.types.SystemTimestamps): + Output only. Timestamps about this taxonomy. Only + create_time and update_time are used. + activated_policy_types (Sequence[google.cloud.datacatalog_v1.types.Taxonomy.PolicyType]): + Optional. A list of policy types that are + activated for this taxonomy. If not set, + defaults to an empty list. + """ + + class PolicyType(proto.Enum): + r"""Defines policy types where the policy tags can be used for.""" + POLICY_TYPE_UNSPECIFIED = 0 + FINE_GRAINED_ACCESS_CONTROL = 1 + + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) + policy_tag_count = proto.Field(proto.INT32, number=4,) + taxonomy_timestamps = proto.Field( + proto.MESSAGE, number=5, message=timestamps.SystemTimestamps, + ) + activated_policy_types = proto.RepeatedField(proto.ENUM, number=6, enum=PolicyType,) + + +class PolicyTag(proto.Message): + r"""Denotes one policy tag in a taxonomy (e.g. ssn). Policy tags can be + defined in a hierarchy. For example, consider the following + hierarchy: + + :: + + + Geolocation + + LatLong + + City + + ZipCode + + Policy tag "Geolocation" contains 3 child policy tags: "LatLong", + "City", and "ZipCode". + + Attributes: + name (str): + Output only. 
Resource name of this policy tag in format: + "projects/{project_number}/locations/{location_id}/taxonomies/{taxonomy_id}/policyTags/{policy_tag_id}". + Both taxonomy_ids and policy_tag_ids are unique and + generated by Policy Tag Manager. + display_name (str): + Required. User-defined name of this policy + tag. It must: be unique within the parent + taxonomy; contain only unicode letters, numbers, + underscores, dashes and spaces; not start or end + with spaces; and be at most 200 bytes long when + encoded in UTF-8. + description (str): + Description of this policy tag. It must: + contain only unicode characters, tabs, newlines, + carriage returns and page breaks; and be at most + 2000 bytes long when encoded in UTF-8. If not + set, defaults to an empty description. If not + set, defaults to an empty description. + parent_policy_tag (str): + Resource name of this policy tag's parent + policy tag (e.g. for the "LatLong" policy tag in + the example above, this field contains the + resource name of the "Geolocation" policy tag). + If empty, it means this policy tag is a top + level policy tag (e.g. this field is empty for + the "Geolocation" policy tag in the example + above). If not set, defaults to an empty string. + child_policy_tags (Sequence[str]): + Output only. Resource names of child policy + tags of this policy tag. + """ + + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) + parent_policy_tag = proto.Field(proto.STRING, number=4,) + child_policy_tags = proto.RepeatedField(proto.STRING, number=5,) + + +class CreateTaxonomyRequest(proto.Message): + r"""Request message for + [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy]. + + Attributes: + parent (str): + Required. Resource name of the project that + the taxonomy will belong to. + taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): + The taxonomy to be created. 
+ """ + + parent = proto.Field(proto.STRING, number=1,) + taxonomy = proto.Field(proto.MESSAGE, number=2, message="Taxonomy",) + + +class DeleteTaxonomyRequest(proto.Message): + r"""Request message for + [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy]. + + Attributes: + name (str): + Required. Resource name of the taxonomy to be + deleted. All policy tags in this taxonomy will + also be deleted. + """ + + name = proto.Field(proto.STRING, number=1,) + + +class UpdateTaxonomyRequest(proto.Message): + r"""Request message for + [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy]. + + Attributes: + taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): + The taxonomy to update. Only description, display_name, and + activated policy types can be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The update mask applies to the resource. For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are allowed + to update. + """ + + taxonomy = proto.Field(proto.MESSAGE, number=1, message="Taxonomy",) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) + + +class ListTaxonomiesRequest(proto.Message): + r"""Request message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + + Attributes: + parent (str): + Required. Resource name of the project to + list the taxonomies of. + page_size (int): + The maximum number of items to return. Must + be a value between 1 and 1000. If not set, + defaults to 50. + page_token (str): + The next_page_token value returned from a previous list + request, if any. If not set, defaults to an empty string. 
+ """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + + +class ListTaxonomiesResponse(proto.Message): + r"""Response message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + + Attributes: + taxonomies (Sequence[google.cloud.datacatalog_v1.types.Taxonomy]): + Taxonomies that the project contains. + next_page_token (str): + Token used to retrieve the next page of + results, or empty if there are no more results + in the list. + """ + + @property + def raw_page(self): + return self + + taxonomies = proto.RepeatedField(proto.MESSAGE, number=1, message="Taxonomy",) + next_page_token = proto.Field(proto.STRING, number=2,) + + +class GetTaxonomyRequest(proto.Message): + r"""Request message for + [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. + + Attributes: + name (str): + Required. Resource name of the requested + taxonomy. + """ + + name = proto.Field(proto.STRING, number=1,) + + +class CreatePolicyTagRequest(proto.Message): + r"""Request message for + [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. + + Attributes: + parent (str): + Required. Resource name of the taxonomy that + the policy tag will belong to. + policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): + The policy tag to be created. + """ + + parent = proto.Field(proto.STRING, number=1,) + policy_tag = proto.Field(proto.MESSAGE, number=2, message="PolicyTag",) + + +class DeletePolicyTagRequest(proto.Message): + r"""Request message for + [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. + + Attributes: + name (str): + Required. Resource name of the policy tag to + be deleted. All of its descendant policy tags + will also be deleted. 
+ """ + + name = proto.Field(proto.STRING, number=1,) + + +class UpdatePolicyTagRequest(proto.Message): + r"""Request message for + [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. + + Attributes: + policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): + The policy tag to update. Only the description, + display_name, and parent_policy_tag fields can be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The update mask applies to the resource. Only display_name, + description and parent_policy_tag can be updated and thus + can be listed in the mask. If update_mask is not provided, + all allowed fields (i.e. display_name, description and + parent) will be updated. For more information including the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are allowed + to update. + """ + + policy_tag = proto.Field(proto.MESSAGE, number=1, message="PolicyTag",) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) + + +class ListPolicyTagsRequest(proto.Message): + r"""Request message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + + Attributes: + parent (str): + Required. Resource name of the taxonomy to + list the policy tags of. + page_size (int): + The maximum number of items to return. Must + be a value between 1 and 1000. If not set, + defaults to 50. + page_token (str): + The next_page_token value returned from a previous List + request, if any. If not set, defaults to an empty string. 
+ """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + + +class ListPolicyTagsResponse(proto.Message): + r"""Response message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + + Attributes: + policy_tags (Sequence[google.cloud.datacatalog_v1.types.PolicyTag]): + The policy tags that are in the requested + taxonomy. + next_page_token (str): + Token used to retrieve the next page of + results, or empty if there are no more results + in the list. + """ + + @property + def raw_page(self): + return self + + policy_tags = proto.RepeatedField(proto.MESSAGE, number=1, message="PolicyTag",) + next_page_token = proto.Field(proto.STRING, number=2,) + + +class GetPolicyTagRequest(proto.Message): + r"""Request message for + [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. + + Attributes: + name (str): + Required. Resource name of the requested + policy tag. + """ + + name = proto.Field(proto.STRING, number=1,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py b/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py new file mode 100644 index 00000000..17d84604 --- /dev/null +++ b/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py @@ -0,0 +1,196 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager + + +__protobuf__ = proto.module( + package="google.cloud.datacatalog.v1", + manifest={ + "SerializedTaxonomy", + "SerializedPolicyTag", + "ImportTaxonomiesRequest", + "InlineSource", + "CrossRegionalSource", + "ImportTaxonomiesResponse", + "ExportTaxonomiesRequest", + "ExportTaxonomiesResponse", + }, +) + + +class SerializedTaxonomy(proto.Message): + r"""Message representing a taxonomy, including its policy tags in + hierarchy, as a nested proto. Used for taxonomy replacement, + import, and export. + + Attributes: + display_name (str): + Required. Display name of the taxonomy. At + most 200 bytes when encoded in UTF-8. + description (str): + Description of the serialized taxonomy. At + most 2000 bytes when encoded in UTF-8. If not + set, defaults to an empty description. + policy_tags (Sequence[google.cloud.datacatalog_v1.types.SerializedPolicyTag]): + Top level policy tags associated with the + taxonomy, if any. + activated_policy_types (Sequence[google.cloud.datacatalog_v1.types.Taxonomy.PolicyType]): + A list of policy types that are activated per + taxonomy. + """ + + display_name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + policy_tags = proto.RepeatedField( + proto.MESSAGE, number=3, message="SerializedPolicyTag", + ) + activated_policy_types = proto.RepeatedField( + proto.ENUM, number=4, enum=policytagmanager.Taxonomy.PolicyType, + ) + + +class SerializedPolicyTag(proto.Message): + r"""Message representing one policy tag, including all its + descendant policy tags, as a nested proto. + + Attributes: + policy_tag (str): + Resource name of the policy tag. + This field will be ignored when calling + ImportTaxonomies. + display_name (str): + Required. Display name of the policy tag. 
At + most 200 bytes when encoded in UTF-8. + description (str): + Description of the serialized policy tag. The + length of the description is limited to 2000 + bytes when encoded in UTF-8. If not set, + defaults to an empty description. + child_policy_tags (Sequence[google.cloud.datacatalog_v1.types.SerializedPolicyTag]): + Children of the policy tag, if any. + """ + + policy_tag = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) + child_policy_tags = proto.RepeatedField( + proto.MESSAGE, number=4, message="SerializedPolicyTag", + ) + + +class ImportTaxonomiesRequest(proto.Message): + r"""Request message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + Attributes: + parent (str): + Required. Resource name of project that the + imported taxonomies will belong to. + inline_source (google.cloud.datacatalog_v1.types.InlineSource): + Inline source used for taxonomies import. + cross_regional_source (google.cloud.datacatalog_v1.types.CrossRegionalSource): + Cross-regional source taxonomy to be + imported. + """ + + parent = proto.Field(proto.STRING, number=1,) + inline_source = proto.Field( + proto.MESSAGE, number=2, oneof="source", message="InlineSource", + ) + cross_regional_source = proto.Field( + proto.MESSAGE, number=3, oneof="source", message="CrossRegionalSource", + ) + + +class InlineSource(proto.Message): + r"""Inline source containing taxonomies to import. + Attributes: + taxonomies (Sequence[google.cloud.datacatalog_v1.types.SerializedTaxonomy]): + Required. Taxonomies to be imported. + """ + + taxonomies = proto.RepeatedField( + proto.MESSAGE, number=1, message="SerializedTaxonomy", + ) + + +class CrossRegionalSource(proto.Message): + r"""Cross-regional source used to import an existing taxonomy + into a different region. + + Attributes: + taxonomy (str): + Required. 
The resource name of the source + taxonomy to be imported. + """ + + taxonomy = proto.Field(proto.STRING, number=1,) + + +class ImportTaxonomiesResponse(proto.Message): + r"""Response message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + Attributes: + taxonomies (Sequence[google.cloud.datacatalog_v1.types.Taxonomy]): + Taxonomies that were imported. + """ + + taxonomies = proto.RepeatedField( + proto.MESSAGE, number=1, message=policytagmanager.Taxonomy, + ) + + +class ExportTaxonomiesRequest(proto.Message): + r"""Request message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + Attributes: + parent (str): + Required. Resource name of the project that + the exported taxonomies belong to. + taxonomies (Sequence[str]): + Required. Resource names of the taxonomies to + be exported. + serialized_taxonomies (bool): + Export taxonomies as serialized taxonomies, + which contain all the policy tags as nested + protos. + """ + + parent = proto.Field(proto.STRING, number=1,) + taxonomies = proto.RepeatedField(proto.STRING, number=2,) + serialized_taxonomies = proto.Field(proto.BOOL, number=3, oneof="destination",) + + +class ExportTaxonomiesResponse(proto.Message): + r"""Response message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + Attributes: + taxonomies (Sequence[google.cloud.datacatalog_v1.types.SerializedTaxonomy]): + List of taxonomies and policy tags as nested + protos. 
+ """ + + taxonomies = proto.RepeatedField( + proto.MESSAGE, number=1, message="SerializedTaxonomy", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/schema.py b/google/cloud/datacatalog_v1/types/schema.py index debec332..2c1756b4 100644 --- a/google/cloud/datacatalog_v1/types/schema.py +++ b/google/cloud/datacatalog_v1/types/schema.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore @@ -25,11 +23,12 @@ class Schema(proto.Message): r"""Represents a schema (e.g. BigQuery, GoogleSQL, Avro schema). - Attributes: columns (Sequence[google.cloud.datacatalog_v1.types.ColumnSchema]): - Required. Schema of columns. A maximum of - 10,000 columns and sub-columns can be specified. + The unified GoogleSQL-like schema of columns. + The overall maximum number of columns and nested + columns is 10,000. The maximum nested depth is + 15 levels. """ columns = proto.RepeatedField(proto.MESSAGE, number=2, message="ColumnSchema",) @@ -42,29 +41,32 @@ class ColumnSchema(proto.Message): Attributes: column (str): Required. Name of the column. + Must be a UTF-8 string without dots (.). + The maximum size is 64 bytes. type_ (str): Required. Type of the column. + Must be a UTF-8 string with the maximum size of + 128 bytes. description (str): Optional. Description of the column. Default value is an empty string. + The description must be a UTF-8 string with the + maximum size of 2000 bytes. mode (str): - Optional. A column's mode indicates whether the values in - this column are required, nullable, etc. Only ``NULLABLE``, - ``REQUIRED`` and ``REPEATED`` are supported. Default mode is - ``NULLABLE``. + Optional. A column's mode indicates if values in this column + are required, nullable, or repeated. 
+ + Only ``NULLABLE``, ``REQUIRED``, and ``REPEATED`` values are + supported. Default mode is ``NULLABLE``. subcolumns (Sequence[google.cloud.datacatalog_v1.types.ColumnSchema]): Optional. Schema of sub-columns. A column can have zero or more sub-columns. """ - column = proto.Field(proto.STRING, number=6) - - type_ = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=2) - - mode = proto.Field(proto.STRING, number=3) - + column = proto.Field(proto.STRING, number=6,) + type_ = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + mode = proto.Field(proto.STRING, number=3,) subcolumns = proto.RepeatedField(proto.MESSAGE, number=7, message="ColumnSchema",) diff --git a/google/cloud/datacatalog_v1/types/search.py b/google/cloud/datacatalog_v1/types/search.py index cdcb129f..52c63d71 100644 --- a/google/cloud/datacatalog_v1/types/search.py +++ b/google/cloud/datacatalog_v1/types/search.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.datacatalog_v1.types import common +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -66,6 +64,9 @@ class SearchCatalogResult(proto.Message): Example: - ``//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId`` + modify_time (google.protobuf.timestamp_pb2.Timestamp): + Last-modified timestamp of the entry from the + managing system. integrated_system (google.cloud.datacatalog_v1.types.IntegratedSystem): Output only. 
This field indicates the entry's source system that Data Catalog integrates with, @@ -74,21 +75,27 @@ class SearchCatalogResult(proto.Message): This field indicates the entry's source system that Data Catalog does not integrate with. + fully_qualified_name (str): + Fully Qualified Name of the resource. There are two main + forms of FQNs: {system}:{project}.{dot-separated path to + resource} for non-regionalized resources + {system}:{project}.{location id}.{dot-separated path to + resource} for regionalized resources Examples: + + - dataproc_metastore:projectId.locationId.instanceId.databaseId.tableId + - bigquery:table.project_id.dataset_id.table_id """ search_result_type = proto.Field(proto.ENUM, number=1, enum="SearchResultType",) - - search_result_subtype = proto.Field(proto.STRING, number=2) - - relative_resource_name = proto.Field(proto.STRING, number=3) - - linked_resource = proto.Field(proto.STRING, number=4) - + search_result_subtype = proto.Field(proto.STRING, number=2,) + relative_resource_name = proto.Field(proto.STRING, number=3,) + linked_resource = proto.Field(proto.STRING, number=4,) + modify_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,) integrated_system = proto.Field( proto.ENUM, number=8, oneof="system", enum=common.IntegratedSystem, ) - - user_specified_system = proto.Field(proto.STRING, number=9, oneof="system") + user_specified_system = proto.Field(proto.STRING, number=9, oneof="system",) + fully_qualified_name = proto.Field(proto.STRING, number=10,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/table_spec.py b/google/cloud/datacatalog_v1/types/table_spec.py index 8404dba2..aed2af12 100644 --- a/google/cloud/datacatalog_v1/types/table_spec.py +++ b/google/cloud/datacatalog_v1/types/table_spec.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License 
for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore @@ -35,11 +33,11 @@ class TableSourceType(proto.Enum): TABLE_SOURCE_TYPE_UNSPECIFIED = 0 BIGQUERY_VIEW = 2 BIGQUERY_TABLE = 5 + BIGQUERY_MATERIALIZED_VIEW = 7 class BigQueryTableSpec(proto.Message): r"""Describes a BigQuery table. - Attributes: table_source_type (google.cloud.datacatalog_v1.types.TableSourceType): Output only. The table source type. @@ -52,11 +50,9 @@ class BigQueryTableSpec(proto.Message): """ table_source_type = proto.Field(proto.ENUM, number=1, enum="TableSourceType",) - view_spec = proto.Field( proto.MESSAGE, number=2, oneof="type_spec", message="ViewSpec", ) - table_spec = proto.Field( proto.MESSAGE, number=3, oneof="type_spec", message="TableSpec", ) @@ -64,19 +60,17 @@ class BigQueryTableSpec(proto.Message): class ViewSpec(proto.Message): r"""Table view specification. - Attributes: view_query (str): Output only. The query that defines the table view. """ - view_query = proto.Field(proto.STRING, number=1) + view_query = proto.Field(proto.STRING, number=1,) class TableSpec(proto.Message): r"""Normal BigQuery table spec. - Attributes: grouped_entry (str): Output only. If the table is a dated shard, i.e., with name @@ -87,7 +81,7 @@ class TableSpec(proto.Message): Otherwise, ``grouped_entry`` is empty. """ - grouped_entry = proto.Field(proto.STRING, number=1) + grouped_entry = proto.Field(proto.STRING, number=1,) class BigQueryDateShardedSpec(proto.Message): @@ -109,11 +103,9 @@ class BigQueryDateShardedSpec(proto.Message): Output only. Total number of shards. 
""" - dataset = proto.Field(proto.STRING, number=1) - - table_prefix = proto.Field(proto.STRING, number=2) - - shard_count = proto.Field(proto.INT64, number=3) + dataset = proto.Field(proto.STRING, number=1,) + table_prefix = proto.Field(proto.STRING, number=2,) + shard_count = proto.Field(proto.INT64, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1/types/tags.py b/google/cloud/datacatalog_v1/types/tags.py index e85c5036..8b4fdae5 100644 --- a/google/cloud/datacatalog_v1/types/tags.py +++ b/google/cloud/datacatalog_v1/types/tags.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -39,30 +36,29 @@ class Tag(proto.Message): name (str): The resource name of the tag in URL format. Example: - - projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} + ``projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id}`` - where ``tag_id`` is a system-generated identifier. Note that - this Tag may not actually be stored in the location in this - name. + where ``tag_id`` is a system-generated identifier. + + Note: The tag itself might not be stored in the location + specified in its name. template (str): Required. The resource name of the tag template that this tag uses. Example: - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + ``projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}`` This field cannot be modified after creation. template_display_name (str): Output only. 
The display name of the tag template. column (str): - Resources like Entry can have schemas associated with them. + Resources like entry can have schemas associated with them. This scope allows users to attach tags to an individual column based on that schema. - For attaching a tag to a nested column, use ``.`` to - separate the column names. Example: - - - ``outer_column.inner_column`` + To attach a tag to a nested column, separate column names + with a dot (``.``). Example: ``column.nested_column``. fields (Sequence[google.cloud.datacatalog_v1.types.Tag.FieldsEntry]): Required. This maps the ID of a tag field to the value of and additional information about @@ -71,14 +67,10 @@ class Tag(proto.Message): and at most 500 fields. """ - name = proto.Field(proto.STRING, number=1) - - template = proto.Field(proto.STRING, number=2) - - template_display_name = proto.Field(proto.STRING, number=5) - - column = proto.Field(proto.STRING, number=4, oneof="scope") - + name = proto.Field(proto.STRING, number=1,) + template = proto.Field(proto.STRING, number=2,) + template_display_name = proto.Field(proto.STRING, number=5,) + column = proto.Field(proto.STRING, number=4, oneof="scope",) fields = proto.MapField(proto.STRING, proto.MESSAGE, number=3, message="TagField",) @@ -94,7 +86,8 @@ class TagField(proto.Message): type. string_value (str): Holds the value for a tag field with string - type. + type. The maximum length is 2000 UTF-8 + characters. bool_value (bool): Holds the value for a tag field with boolean type. @@ -117,29 +110,22 @@ class TagField(proto.Message): class EnumValue(proto.Message): r"""Holds an enum value. - Attributes: display_name (str): The display name of the enum value. 
""" - display_name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=1) - - double_value = proto.Field(proto.DOUBLE, number=2, oneof="kind") - - string_value = proto.Field(proto.STRING, number=3, oneof="kind") - - bool_value = proto.Field(proto.BOOL, number=4, oneof="kind") + display_name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=1,) + double_value = proto.Field(proto.DOUBLE, number=2, oneof="kind",) + string_value = proto.Field(proto.STRING, number=3, oneof="kind",) + bool_value = proto.Field(proto.BOOL, number=4, oneof="kind",) timestamp_value = proto.Field( - proto.MESSAGE, number=5, oneof="kind", message=timestamp.Timestamp, + proto.MESSAGE, number=5, oneof="kind", message=timestamp_pb2.Timestamp, ) - enum_value = proto.Field(proto.MESSAGE, number=6, oneof="kind", message=EnumValue,) - - order = proto.Field(proto.INT32, number=7) + order = proto.Field(proto.INT32, number=7,) class TagTemplate(proto.Message): @@ -158,13 +144,16 @@ class TagTemplate(proto.Message): The resource name of the tag template in URL format. Example: - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + ``projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}`` - Note that this TagTemplate and its child resources may not - actually be stored in the location in this name. + Note: The tag template itself and its child resources might + not be stored in the location specified in its name. display_name (str): - The display name for this template. Defaults - to an empty string. + Display name for this template. Defaults to an empty string. + + The name must contain only Unicode letters, numbers (0-9), + underscores (_), dashes (-), spaces ( ), and can't start or + end with spaces. The maximum length is 200 characters. fields (Sequence[google.cloud.datacatalog_v1.types.TagTemplate.FieldsEntry]): Required. 
Map of tag template field IDs to the settings for the field. This map is an exhaustive list of the allowed @@ -178,10 +167,8 @@ class TagTemplate(proto.Message): must start with a letter or underscore. """ - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) fields = proto.MapField( proto.STRING, proto.MESSAGE, number=3, message="TagTemplateField", ) @@ -189,25 +176,35 @@ class TagTemplate(proto.Message): class TagTemplateField(proto.Message): r"""The template for an individual field within a tag template. - Attributes: name (str): Output only. The resource name of the tag template field in URL format. Example: - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field} + ``projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field}`` - Note that this TagTemplateField may not actually be stored - in the location in this name. + Note: The ``TagTemplateField`` itself might not be stored in + the location specified in its name. + + The name must contain only letters (a-z, A-Z), numbers + (0-9), or underscores (_), and must start with a letter or + underscore. The maximum length is 64 characters. display_name (str): - The display name for this field. Defaults to - an empty string. + The display name for this field. Defaults to an empty + string. + + The name must contain only Unicode letters, numbers (0-9), + underscores (_), dashes (-), spaces ( ), and can't start or + end with spaces. The maximum length is 200 characters. type_ (google.cloud.datacatalog_v1.types.FieldType): Required. The type of value this tag field can contain. is_required (bool): Whether this is a required field. Defaults to false. + description (str): + The description for this field. Defaults to + an empty string. 
order (int): The order of this field with respect to other fields in this tag template. For example, a @@ -217,20 +214,16 @@ class TagTemplateField(proto.Message): within a tag do not have to be sequential. """ - name = proto.Field(proto.STRING, number=6) - - display_name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=6,) + display_name = proto.Field(proto.STRING, number=1,) type_ = proto.Field(proto.MESSAGE, number=2, message="FieldType",) - - is_required = proto.Field(proto.BOOL, number=3) - - order = proto.Field(proto.INT32, number=5) + is_required = proto.Field(proto.BOOL, number=3,) + description = proto.Field(proto.STRING, number=4,) + order = proto.Field(proto.INT32, number=5,) class FieldType(proto.Message): r""" - Attributes: primitive_type (google.cloud.datacatalog_v1.types.FieldType.PrimitiveType): Represents primitive types - string, bool @@ -249,31 +242,34 @@ class PrimitiveType(proto.Enum): class EnumType(proto.Message): r""" - Attributes: allowed_values (Sequence[google.cloud.datacatalog_v1.types.FieldType.EnumType.EnumValue]): - Required on create; optional on update. The - set of allowed values for this enum. This set - must not be empty, the display names of the - values in this set must not be empty and the - display names of the values must be case- - insensitively unique within this set. Currently, - enum values can only be added to the list of - allowed values. Deletion and renaming of enum - values are not supported. Can have up to 500 - allowed values. + The set of allowed values for this enum. + + This set must not be empty and can include up to 100 allowed + values. The display names of the values in this set must not + be empty and must be case-insensitively unique within this + set. + + The order of items in this set is preserved. This field can + be used to create, remove and reorder enum values. To rename + enum values, use the ``RenameTagTemplateFieldEnumValue`` + method. 
""" class EnumValue(proto.Message): r""" - Attributes: display_name (str): - Required. The display name of the enum value. - Must not be an empty string. + Required. The display name of the enum value. Must not be an + empty string. + + The name must contain only Unicode letters, numbers (0-9), + underscores (_), dashes (-), spaces ( ), and can't start or + end with spaces. The maximum length is 200 characters. """ - display_name = proto.Field(proto.STRING, number=1) + display_name = proto.Field(proto.STRING, number=1,) allowed_values = proto.RepeatedField( proto.MESSAGE, number=1, message="FieldType.EnumType.EnumValue", @@ -282,7 +278,6 @@ class EnumValue(proto.Message): primitive_type = proto.Field( proto.ENUM, number=1, oneof="type_decl", enum=PrimitiveType, ) - enum_type = proto.Field( proto.MESSAGE, number=2, oneof="type_decl", message=EnumType, ) diff --git a/google/cloud/datacatalog_v1/types/timestamps.py b/google/cloud/datacatalog_v1/types/timestamps.py index 4d4a834f..9c27d676 100644 --- a/google/cloud/datacatalog_v1/types/timestamps.py +++ b/google/cloud/datacatalog_v1/types/timestamps.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -43,11 +40,9 @@ class SystemTimestamps(proto.Message): apllicable to BigQuery resources. 
""" - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - expire_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + expire_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index be0bdd8e..c794255e 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,10 +15,16 @@ # from .services.data_catalog import DataCatalogClient +from .services.data_catalog import DataCatalogAsyncClient from .services.policy_tag_manager import PolicyTagManagerClient +from .services.policy_tag_manager import PolicyTagManagerAsyncClient from .services.policy_tag_manager_serialization import ( PolicyTagManagerSerializationClient, ) +from .services.policy_tag_manager_serialization import ( + PolicyTagManagerSerializationAsyncClient, +) + from .types.common import IntegratedSystem from .types.datacatalog import CreateEntryGroupRequest from .types.datacatalog import CreateEntryRequest @@ -33,7 +38,6 @@ from .types.datacatalog import DeleteTagTemplateRequest from .types.datacatalog import Entry from .types.datacatalog import EntryGroup -from .types.datacatalog import EntryType from .types.datacatalog import GetEntryGroupRequest from .types.datacatalog import GetEntryRequest from .types.datacatalog import GetTagTemplateRequest @@ -52,8 +56,9 @@ from .types.datacatalog import UpdateTagRequest from .types.datacatalog 
import UpdateTagTemplateFieldRequest from .types.datacatalog import UpdateTagTemplateRequest -from .types.gcs_fileset_spec import GcsFileSpec +from .types.datacatalog import EntryType from .types.gcs_fileset_spec import GcsFilesetSpec +from .types.gcs_fileset_spec import GcsFileSpec from .types.policytagmanager import CreatePolicyTagRequest from .types.policytagmanager import CreateTaxonomyRequest from .types.policytagmanager import DeletePolicyTagRequest @@ -81,9 +86,9 @@ from .types.search import SearchResultType from .types.table_spec import BigQueryDateShardedSpec from .types.table_spec import BigQueryTableSpec -from .types.table_spec import TableSourceType from .types.table_spec import TableSpec from .types.table_spec import ViewSpec +from .types.table_spec import TableSourceType from .types.tags import FieldType from .types.tags import Tag from .types.tags import TagField @@ -91,8 +96,10 @@ from .types.tags import TagTemplateField from .types.timestamps import SystemTimestamps - __all__ = ( + "DataCatalogAsyncClient", + "PolicyTagManagerAsyncClient", + "PolicyTagManagerSerializationAsyncClient", "BigQueryDateShardedSpec", "BigQueryTableSpec", "ColumnSchema", @@ -141,6 +148,7 @@ "LookupEntryRequest", "PolicyTag", "PolicyTagManagerClient", + "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", "Schema", "SearchCatalogRequest", @@ -165,5 +173,4 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerSerializationClient", ) diff --git a/google/cloud/datacatalog_v1beta1/gapic_metadata.json b/google/cloud/datacatalog_v1beta1/gapic_metadata.json new file mode 100644 index 00000000..39950163 --- /dev/null +++ b/google/cloud/datacatalog_v1beta1/gapic_metadata.json @@ -0,0 +1,471 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.datacatalog_v1beta1", + "protoPackage": "google.cloud.datacatalog.v1beta1", + 
"schema": "1.0", + "services": { + "DataCatalog": { + "clients": { + "grpc": { + "libraryClient": "DataCatalogClient", + "rpcs": { + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + }, + "CreateTag": { + "methods": [ + "create_tag" + ] + }, + "CreateTagTemplate": { + "methods": [ + "create_tag_template" + ] + }, + "CreateTagTemplateField": { + "methods": [ + "create_tag_template_field" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteTag": { + "methods": [ + "delete_tag" + ] + }, + "DeleteTagTemplate": { + "methods": [ + "delete_tag_template" + ] + }, + "DeleteTagTemplateField": { + "methods": [ + "delete_tag_template_field" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { + "methods": [ + "get_entry_group" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetTagTemplate": { + "methods": [ + "get_tag_template" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListTags": { + "methods": [ + "list_tags" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "RenameTagTemplateField": { + "methods": [ + "rename_tag_template_field" + ] + }, + "SearchCatalog": { + "methods": [ + "search_catalog" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateTag": { + "methods": [ + "update_tag" + ] + }, + "UpdateTagTemplate": { + "methods": [ + "update_tag_template" + ] + }, + "UpdateTagTemplateField": { + "methods": [ + "update_tag_template_field" + ] + } + } + }, + "grpc-async": { + "libraryClient": 
"DataCatalogAsyncClient", + "rpcs": { + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + }, + "CreateTag": { + "methods": [ + "create_tag" + ] + }, + "CreateTagTemplate": { + "methods": [ + "create_tag_template" + ] + }, + "CreateTagTemplateField": { + "methods": [ + "create_tag_template_field" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteTag": { + "methods": [ + "delete_tag" + ] + }, + "DeleteTagTemplate": { + "methods": [ + "delete_tag_template" + ] + }, + "DeleteTagTemplateField": { + "methods": [ + "delete_tag_template_field" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { + "methods": [ + "get_entry_group" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetTagTemplate": { + "methods": [ + "get_tag_template" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListTags": { + "methods": [ + "list_tags" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "RenameTagTemplateField": { + "methods": [ + "rename_tag_template_field" + ] + }, + "SearchCatalog": { + "methods": [ + "search_catalog" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateTag": { + "methods": [ + "update_tag" + ] + }, + "UpdateTagTemplate": { + "methods": [ + "update_tag_template" + ] + }, + "UpdateTagTemplateField": { + "methods": [ + "update_tag_template_field" + ] + } + } + } + } + }, + "PolicyTagManager": { + "clients": { + "grpc": { + "libraryClient": "PolicyTagManagerClient", + "rpcs": { + "CreatePolicyTag": { + 
"methods": [ + "create_policy_tag" + ] + }, + "CreateTaxonomy": { + "methods": [ + "create_taxonomy" + ] + }, + "DeletePolicyTag": { + "methods": [ + "delete_policy_tag" + ] + }, + "DeleteTaxonomy": { + "methods": [ + "delete_taxonomy" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetPolicyTag": { + "methods": [ + "get_policy_tag" + ] + }, + "GetTaxonomy": { + "methods": [ + "get_taxonomy" + ] + }, + "ListPolicyTags": { + "methods": [ + "list_policy_tags" + ] + }, + "ListTaxonomies": { + "methods": [ + "list_taxonomies" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdatePolicyTag": { + "methods": [ + "update_policy_tag" + ] + }, + "UpdateTaxonomy": { + "methods": [ + "update_taxonomy" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PolicyTagManagerAsyncClient", + "rpcs": { + "CreatePolicyTag": { + "methods": [ + "create_policy_tag" + ] + }, + "CreateTaxonomy": { + "methods": [ + "create_taxonomy" + ] + }, + "DeletePolicyTag": { + "methods": [ + "delete_policy_tag" + ] + }, + "DeleteTaxonomy": { + "methods": [ + "delete_taxonomy" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetPolicyTag": { + "methods": [ + "get_policy_tag" + ] + }, + "GetTaxonomy": { + "methods": [ + "get_taxonomy" + ] + }, + "ListPolicyTags": { + "methods": [ + "list_policy_tags" + ] + }, + "ListTaxonomies": { + "methods": [ + "list_taxonomies" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdatePolicyTag": { + "methods": [ + "update_policy_tag" + ] + }, + "UpdateTaxonomy": { + "methods": [ + "update_taxonomy" + ] + } + } + } + } + }, + "PolicyTagManagerSerialization": { + "clients": { + "grpc": { + "libraryClient": "PolicyTagManagerSerializationClient", + "rpcs": { + "ExportTaxonomies": { + "methods": [ + "export_taxonomies" + 
] + }, + "ImportTaxonomies": { + "methods": [ + "import_taxonomies" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PolicyTagManagerSerializationAsyncClient", + "rpcs": { + "ExportTaxonomies": { + "methods": [ + "export_taxonomies" + ] + }, + "ImportTaxonomies": { + "methods": [ + "import_taxonomies" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/datacatalog_v1beta1/services/__init__.py b/google/cloud/datacatalog_v1beta1/services/__init__.py index 42ffdf2b..4de65971 100644 --- a/google/cloud/datacatalog_v1beta1/services/__init__.py +++ b/google/cloud/datacatalog_v1beta1/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py index e56ed8a6..55cf6fbb 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .client import DataCatalogClient from .async_client import DataCatalogAsyncClient diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py index f334609f..7ceda1ef 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -22,10 +20,10 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.datacatalog_v1beta1.services.data_catalog import pagers @@ -37,10 +35,9 @@ from google.cloud.datacatalog_v1beta1.types import table_spec from google.cloud.datacatalog_v1beta1.types import tags from google.cloud.datacatalog_v1beta1.types import timestamps -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from .transports.base import DataCatalogTransport, 
DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport from .client import DataCatalogClient @@ -68,27 +65,22 @@ class DataCatalogAsyncClient: parse_tag_template_field_path = staticmethod( DataCatalogClient.parse_tag_template_field_path ) - common_billing_account_path = staticmethod( DataCatalogClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( DataCatalogClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(DataCatalogClient.common_folder_path) parse_common_folder_path = staticmethod(DataCatalogClient.parse_common_folder_path) - common_organization_path = staticmethod(DataCatalogClient.common_organization_path) parse_common_organization_path = staticmethod( DataCatalogClient.parse_common_organization_path ) - common_project_path = staticmethod(DataCatalogClient.common_project_path) parse_common_project_path = staticmethod( DataCatalogClient.parse_common_project_path ) - common_location_path = staticmethod(DataCatalogClient.common_location_path) parse_common_location_path = staticmethod( DataCatalogClient.parse_common_location_path @@ -96,7 +88,8 @@ class DataCatalogAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -111,7 +104,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -128,7 +121,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DataCatalogTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: DataCatalogTransport: The transport used by the client instance. @@ -142,12 +135,12 @@ def transport(self) -> DataCatalogTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, DataCatalogTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the data catalog client. + """Instantiates the data catalog client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -179,7 +172,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = DataCatalogClient( credentials=credentials, transport=transport, @@ -248,7 +240,6 @@ async def search_catalog( This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -278,7 +269,6 @@ async def search_catalog( # If we have keyword arguments corresponding to fields on the # request, apply these. - if scope is not None: request.scope = scope if query is not None: @@ -358,7 +348,6 @@ async def create_entry_group( This corresponds to the ``entry_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -388,7 +377,6 @@ async def create_entry_group( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if entry_group_id is not None: @@ -421,7 +409,7 @@ async def update_entry_group( request: datacatalog.UpdateEntryGroupRequest = None, *, entry_group: datacatalog.EntryGroup = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -451,7 +439,6 @@ async def update_entry_group( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -481,7 +468,6 @@ async def update_entry_group( # If we have keyword arguments corresponding to fields on the # request, apply these. - if entry_group is not None: request.entry_group = entry_group if update_mask is not None: @@ -514,7 +500,7 @@ async def get_entry_group( request: datacatalog.GetEntryGroupRequest = None, *, name: str = None, - read_mask: field_mask.FieldMask = None, + read_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -539,7 +525,6 @@ async def get_entry_group( This corresponds to the ``read_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -569,7 +554,6 @@ async def get_entry_group( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name if read_mask is not None: @@ -584,7 +568,8 @@ async def get_entry_group( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -631,7 +616,6 @@ async def delete_entry_group( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -652,7 +636,6 @@ async def delete_entry_group( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -665,7 +648,8 @@ async def delete_entry_group( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -709,7 +693,6 @@ async def list_entry_groups( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -739,7 +722,6 @@ async def list_entry_groups( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -819,7 +801,6 @@ async def create_entry( This corresponds to the ``entry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -856,7 +837,6 @@ async def create_entry( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if entry_id is not None: @@ -889,7 +869,7 @@ async def update_entry( request: datacatalog.UpdateEntryRequest = None, *, entry: datacatalog.Entry = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -942,7 +922,6 @@ async def update_entry( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -979,7 +958,6 @@ async def update_entry( # If we have keyword arguments corresponding to fields on the # request, apply these. - if entry is not None: request.entry = entry if update_mask is not None: @@ -1036,7 +1014,6 @@ async def delete_entry( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1057,7 +1034,6 @@ async def delete_entry( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1070,7 +1046,8 @@ async def delete_entry( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1112,7 +1089,6 @@ async def get_entry( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1149,7 +1125,6 @@ async def get_entry( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1162,7 +1137,8 @@ async def get_entry( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1199,7 +1175,6 @@ async def lookup_entry( request (:class:`google.cloud.datacatalog_v1beta1.types.LookupEntryRequest`): The request object. Request message for [LookupEntry][google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1223,7 +1198,6 @@ async def lookup_entry( """ # Create or coerce a protobuf request object. - request = datacatalog.LookupEntryRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1235,7 +1209,8 @@ async def lookup_entry( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1273,7 +1248,6 @@ async def list_entries( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1303,7 +1277,6 @@ async def list_entries( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -1378,7 +1351,6 @@ async def create_tag_template( This corresponds to the ``tag_template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1412,7 +1384,6 @@ async def create_tag_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if tag_template_id is not None: @@ -1463,7 +1434,6 @@ async def get_tag_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1497,7 +1467,6 @@ async def get_tag_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1510,7 +1479,8 @@ async def get_tag_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1535,7 +1505,7 @@ async def update_tag_template( request: datacatalog.UpdateTagTemplateRequest = None, *, tag_template: tags.TagTemplate = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1575,7 +1545,6 @@ async def update_tag_template( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -1609,7 +1578,6 @@ async def update_tag_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if tag_template is not None: request.tag_template = tag_template if update_mask is not None: @@ -1675,7 +1643,6 @@ async def delete_tag_template( This corresponds to the ``force`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1696,7 +1663,6 @@ async def delete_tag_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if force is not None: @@ -1711,7 +1677,8 @@ async def delete_tag_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1781,7 +1748,6 @@ async def create_tag_template_field( This corresponds to the ``tag_template_field`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1808,7 +1774,6 @@ async def create_tag_template_field( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent if tag_template_field_id is not None: @@ -1842,7 +1807,7 @@ async def update_tag_template_field( *, name: str = None, tag_template_field: tags.TagTemplateField = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1891,7 +1856,6 @@ async def update_tag_template_field( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1918,7 +1882,6 @@ async def update_tag_template_field( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if tag_template_field is not None: @@ -1981,7 +1944,6 @@ async def rename_tag_template_field( This corresponds to the ``new_tag_template_field_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2008,7 +1970,6 @@ async def rename_tag_template_field( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if new_tag_template_field_id is not None: @@ -2073,7 +2034,6 @@ async def delete_tag_template_field( This corresponds to the ``force`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2094,7 +2054,6 @@ async def delete_tag_template_field( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if force is not None: @@ -2109,7 +2068,8 @@ async def delete_tag_template_field( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -2167,7 +2127,6 @@ async def create_tag( This corresponds to the ``tag`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2200,7 +2159,6 @@ async def create_tag( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if tag is not None: @@ -2231,7 +2189,7 @@ async def update_tag( request: datacatalog.UpdateTagRequest = None, *, tag: tags.Tag = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -2257,7 +2215,6 @@ async def update_tag( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2290,7 +2247,6 @@ async def update_tag( # If we have keyword arguments corresponding to fields on the # request, apply these. - if tag is not None: request.tag = tag if update_mask is not None: @@ -2339,7 +2295,6 @@ async def delete_tag( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2360,7 +2315,6 @@ async def delete_tag( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2373,7 +2327,8 @@ async def delete_tag( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -2422,7 +2377,6 @@ async def list_tags( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2452,7 +2406,6 @@ async def list_tags( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -2465,7 +2418,8 @@ async def list_tags( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -2493,13 +2447,13 @@ async def list_tags( async def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, resource: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Sets the access control policy for a resource. Replaces any existing policy. Supported resources are: @@ -2531,7 +2485,6 @@ async def set_iam_policy( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2610,10 +2563,9 @@ async def set_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.SetIamPolicyRequest(**request) - + request = iam_policy_pb2.SetIamPolicyRequest(**request) elif not request: - request = iam_policy.SetIamPolicyRequest(resource=resource,) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -2637,13 +2589,13 @@ async def set_iam_policy( async def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, resource: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the access control policy for a resource. A ``NOT_FOUND`` error is returned if the resource does not exist. An empty policy is returned if the resource exists but does not have a @@ -2679,7 +2631,6 @@ async def get_iam_policy( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2758,10 +2709,9 @@ async def get_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. 
if isinstance(request, dict): - request = iam_policy.GetIamPolicyRequest(**request) - + request = iam_policy_pb2.GetIamPolicyRequest(**request) elif not request: - request = iam_policy.GetIamPolicyRequest(resource=resource,) + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -2785,12 +2735,12 @@ async def get_iam_policy( async def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Returns the caller's permissions on a resource. If the resource does not exist, an empty set of permissions is returned (We don't return a ``NOT_FOUND`` error). @@ -2810,7 +2760,6 @@ async def test_iam_permissions( request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2822,11 +2771,10 @@ async def test_iam_permissions( Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py index 28d471aa..859478ff 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -41,10 +39,9 @@ from google.cloud.datacatalog_v1beta1.types import table_spec from google.cloud.datacatalog_v1beta1.types import tags from google.cloud.datacatalog_v1beta1.types import timestamps -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO from 
.transports.grpc import DataCatalogGrpcTransport from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport @@ -63,7 +60,7 @@ class DataCatalogClientMeta(type): _transport_registry["grpc_asyncio"] = DataCatalogGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[DataCatalogTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -88,7 +85,8 @@ class DataCatalogClient(metaclass=DataCatalogClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -122,7 +120,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -139,7 +138,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -158,23 +157,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DataCatalogTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - DataCatalogTransport: The transport used by the client instance. + DataCatalogTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def entry_path(project: str, location: str, entry_group: str, entry: str,) -> str: - """Return a fully-qualified entry string.""" + """Returns a fully-qualified entry string.""" return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format( project=project, location=location, entry_group=entry_group, entry=entry, ) @staticmethod def parse_entry_path(path: str) -> Dict[str, str]: - """Parse a entry path into its component segments.""" + """Parses a entry path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entries/(?P.+?)$", path, @@ -183,14 +183,14 @@ def parse_entry_path(path: str) -> Dict[str, str]: @staticmethod def entry_group_path(project: str, location: str, entry_group: str,) -> str: - """Return a fully-qualified entry_group string.""" + """Returns a fully-qualified entry_group string.""" return "projects/{project}/locations/{location}/entryGroups/{entry_group}".format( project=project, location=location, entry_group=entry_group, ) @staticmethod def parse_entry_group_path(path: str) -> Dict[str, str]: - """Parse a entry_group path into its component segments.""" + """Parses a entry_group path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)$", path, @@ -201,7 +201,7 @@ def parse_entry_group_path(path: str) -> Dict[str, str]: def tag_path( project: str, location: str, entry_group: str, entry: str, tag: str, ) -> str: - """Return a fully-qualified tag string.""" + """Returns a fully-qualified tag string.""" return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format( project=project, location=location, @@ -212,7 +212,7 @@ def tag_path( @staticmethod def parse_tag_path(path: str) -> Dict[str, str]: - """Parse a tag path into its component segments.""" + """Parses a tag path into its component segments.""" m = re.match( 
r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entries/(?P.+?)/tags/(?P.+?)$", path, @@ -221,14 +221,14 @@ def parse_tag_path(path: str) -> Dict[str, str]: @staticmethod def tag_template_path(project: str, location: str, tag_template: str,) -> str: - """Return a fully-qualified tag_template string.""" + """Returns a fully-qualified tag_template string.""" return "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format( project=project, location=location, tag_template=tag_template, ) @staticmethod def parse_tag_template_path(path: str) -> Dict[str, str]: - """Parse a tag_template path into its component segments.""" + """Parses a tag_template path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/tagTemplates/(?P.+?)$", path, @@ -239,14 +239,14 @@ def parse_tag_template_path(path: str) -> Dict[str, str]: def tag_template_field_path( project: str, location: str, tag_template: str, field: str, ) -> str: - """Return a fully-qualified tag_template_field string.""" + """Returns a fully-qualified tag_template_field string.""" return "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format( project=project, location=location, tag_template=tag_template, field=field, ) @staticmethod def parse_tag_template_field_path(path: str) -> Dict[str, str]: - """Parse a tag_template_field path into its component segments.""" + """Parses a tag_template_field path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/tagTemplates/(?P.+?)/fields/(?P.+?)$", path, @@ -255,7 +255,7 @@ def parse_tag_template_field_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -268,7 +268,7 @@ def 
parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -279,7 +279,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -290,7 +290,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -301,7 +301,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -315,12 +315,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, DataCatalogTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the data catalog client. + """Instantiates the data catalog client. 
Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -375,9 +375,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -389,12 +390,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -409,8 +412,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -486,7 +489,6 @@ def search_catalog( This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -518,10 +520,8 @@ def search_catalog( # there are no flattened fields. if not isinstance(request, datacatalog.SearchCatalogRequest): request = datacatalog.SearchCatalogRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if scope is not None: request.scope = scope if query is not None: @@ -597,7 +597,6 @@ def create_entry_group( This corresponds to the ``entry_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -629,10 +628,8 @@ def create_entry_group( # there are no flattened fields. if not isinstance(request, datacatalog.CreateEntryGroupRequest): request = datacatalog.CreateEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if entry_group_id is not None: @@ -661,7 +658,7 @@ def update_entry_group( request: datacatalog.UpdateEntryGroupRequest = None, *, entry_group: datacatalog.EntryGroup = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -691,7 +688,6 @@ def update_entry_group( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -723,10 +719,8 @@ def update_entry_group( # there are no flattened fields. if not isinstance(request, datacatalog.UpdateEntryGroupRequest): request = datacatalog.UpdateEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if entry_group is not None: request.entry_group = entry_group if update_mask is not None: @@ -755,7 +749,7 @@ def get_entry_group( request: datacatalog.GetEntryGroupRequest = None, *, name: str = None, - read_mask: field_mask.FieldMask = None, + read_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -780,7 +774,6 @@ def get_entry_group( This corresponds to the ``read_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -812,10 +805,8 @@ def get_entry_group( # there are no flattened fields. if not isinstance(request, datacatalog.GetEntryGroupRequest): request = datacatalog.GetEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if read_mask is not None: @@ -864,7 +855,6 @@ def delete_entry_group( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -887,10 +877,8 @@ def delete_entry_group( # there are no flattened fields. if not isinstance(request, datacatalog.DeleteEntryGroupRequest): request = datacatalog.DeleteEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -934,7 +922,6 @@ def list_entry_groups( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -966,10 +953,8 @@ def list_entry_groups( # there are no flattened fields. if not isinstance(request, datacatalog.ListEntryGroupsRequest): request = datacatalog.ListEntryGroupsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1045,7 +1030,6 @@ def create_entry( This corresponds to the ``entry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1084,10 +1068,8 @@ def create_entry( # there are no flattened fields. if not isinstance(request, datacatalog.CreateEntryRequest): request = datacatalog.CreateEntryRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if entry_id is not None: @@ -1116,7 +1098,7 @@ def update_entry( request: datacatalog.UpdateEntryRequest = None, *, entry: datacatalog.Entry = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1169,7 +1151,6 @@ def update_entry( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1208,10 +1189,8 @@ def update_entry( # there are no flattened fields. if not isinstance(request, datacatalog.UpdateEntryRequest): request = datacatalog.UpdateEntryRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if entry is not None: request.entry = entry if update_mask is not None: @@ -1264,7 +1243,6 @@ def delete_entry( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1287,10 +1265,8 @@ def delete_entry( # there are no flattened fields. if not isinstance(request, datacatalog.DeleteEntryRequest): request = datacatalog.DeleteEntryRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1332,7 +1308,6 @@ def get_entry( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1371,10 +1346,8 @@ def get_entry( # there are no flattened fields. if not isinstance(request, datacatalog.GetEntryRequest): request = datacatalog.GetEntryRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1411,7 +1384,6 @@ def lookup_entry( request (google.cloud.datacatalog_v1beta1.types.LookupEntryRequest): The request object. Request message for [LookupEntry][google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1435,7 +1407,6 @@ def lookup_entry( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a datacatalog.LookupEntryRequest. 
# There's no risk of modifying the input as we've already verified @@ -1477,7 +1448,6 @@ def list_entries( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1509,10 +1479,8 @@ def list_entries( # there are no flattened fields. if not isinstance(request, datacatalog.ListEntriesRequest): request = datacatalog.ListEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1583,7 +1551,6 @@ def create_tag_template( This corresponds to the ``tag_template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1619,10 +1586,8 @@ def create_tag_template( # there are no flattened fields. if not isinstance(request, datacatalog.CreateTagTemplateRequest): request = datacatalog.CreateTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if tag_template_id is not None: @@ -1669,7 +1634,6 @@ def get_tag_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1705,10 +1669,8 @@ def get_tag_template( # there are no flattened fields. if not isinstance(request, datacatalog.GetTagTemplateRequest): request = datacatalog.GetTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name @@ -1733,7 +1695,7 @@ def update_tag_template( request: datacatalog.UpdateTagTemplateRequest = None, *, tag_template: tags.TagTemplate = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1773,7 +1735,6 @@ def update_tag_template( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1809,10 +1770,8 @@ def update_tag_template( # there are no flattened fields. if not isinstance(request, datacatalog.UpdateTagTemplateRequest): request = datacatalog.UpdateTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if tag_template is not None: request.tag_template = tag_template if update_mask is not None: @@ -1874,7 +1833,6 @@ def delete_tag_template( This corresponds to the ``force`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1897,10 +1855,8 @@ def delete_tag_template( # there are no flattened fields. if not isinstance(request, datacatalog.DeleteTagTemplateRequest): request = datacatalog.DeleteTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if force is not None: @@ -1972,7 +1928,6 @@ def create_tag_template_field( This corresponds to the ``tag_template_field`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2001,10 +1956,8 @@ def create_tag_template_field( # there are no flattened fields. if not isinstance(request, datacatalog.CreateTagTemplateFieldRequest): request = datacatalog.CreateTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if tag_template_field_id is not None: @@ -2036,7 +1989,7 @@ def update_tag_template_field( *, name: str = None, tag_template_field: tags.TagTemplateField = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -2085,7 +2038,6 @@ def update_tag_template_field( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2114,10 +2066,8 @@ def update_tag_template_field( # there are no flattened fields. if not isinstance(request, datacatalog.UpdateTagTemplateFieldRequest): request = datacatalog.UpdateTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if tag_template_field is not None: @@ -2178,7 +2128,6 @@ def rename_tag_template_field( This corresponds to the ``new_tag_template_field_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2207,10 +2156,8 @@ def rename_tag_template_field( # there are no flattened fields. 
if not isinstance(request, datacatalog.RenameTagTemplateFieldRequest): request = datacatalog.RenameTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if new_tag_template_field_id is not None: @@ -2273,7 +2220,6 @@ def delete_tag_template_field( This corresponds to the ``force`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2296,10 +2242,8 @@ def delete_tag_template_field( # there are no flattened fields. if not isinstance(request, datacatalog.DeleteTagTemplateFieldRequest): request = datacatalog.DeleteTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if force is not None: @@ -2361,7 +2305,6 @@ def create_tag( This corresponds to the ``tag`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2396,10 +2339,8 @@ def create_tag( # there are no flattened fields. if not isinstance(request, datacatalog.CreateTagRequest): request = datacatalog.CreateTagRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent if tag is not None: @@ -2426,7 +2367,7 @@ def update_tag( request: datacatalog.UpdateTagRequest = None, *, tag: tags.Tag = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -2452,7 +2393,6 @@ def update_tag( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2487,10 +2427,8 @@ def update_tag( # there are no flattened fields. if not isinstance(request, datacatalog.UpdateTagRequest): request = datacatalog.UpdateTagRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if tag is not None: request.tag = tag if update_mask is not None: @@ -2535,7 +2473,6 @@ def delete_tag( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2558,10 +2495,8 @@ def delete_tag( # there are no flattened fields. if not isinstance(request, datacatalog.DeleteTagRequest): request = datacatalog.DeleteTagRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2610,7 +2545,6 @@ def list_tags( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2642,10 +2576,8 @@ def list_tags( # there are no flattened fields. if not isinstance(request, datacatalog.ListTagsRequest): request = datacatalog.ListTagsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -2673,13 +2605,13 @@ def list_tags( def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, resource: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Sets the access control policy for a resource. Replaces any existing policy. Supported resources are: @@ -2711,7 +2643,6 @@ def set_iam_policy( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2790,11 +2721,10 @@ def set_iam_policy( if isinstance(request, dict): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - request = iam_policy.SetIamPolicyRequest(**request) + request = iam_policy_pb2.SetIamPolicyRequest(**request) elif not request: # Null request, just make one. - request = iam_policy.SetIamPolicyRequest() - + request = iam_policy_pb2.SetIamPolicyRequest() if resource is not None: request.resource = resource @@ -2816,13 +2746,13 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, resource: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the access control policy for a resource. 
A ``NOT_FOUND`` error is returned if the resource does not exist. An empty policy is returned if the resource exists but does not have a @@ -2858,7 +2788,6 @@ def get_iam_policy( This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2937,11 +2866,10 @@ def get_iam_policy( if isinstance(request, dict): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - request = iam_policy.GetIamPolicyRequest(**request) + request = iam_policy_pb2.GetIamPolicyRequest(**request) elif not request: # Null request, just make one. - request = iam_policy.GetIamPolicyRequest() - + request = iam_policy_pb2.GetIamPolicyRequest() if resource is not None: request.resource = resource @@ -2963,12 +2891,12 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Returns the caller's permissions on a resource. If the resource does not exist, an empty set of permissions is returned (We don't return a ``NOT_FOUND`` error). @@ -2988,7 +2916,6 @@ def test_iam_permissions( request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3000,14 +2927,13 @@ def test_iam_permissions( Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. 
- if isinstance(request, dict): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) elif not request: # Null request, just make one. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py index 9cd6e4d7..20fce356 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -119,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -247,7 +245,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -375,7 +373,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -503,7 +501,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py index f3f1cf12..f6669083 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py index 40f9af3e..4b50eee5 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.datacatalog_v1beta1.types import datacatalog from google.cloud.datacatalog_v1beta1.types import tags -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -41,27 +40,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class DataCatalogTransport(abc.ABC): """Abstract transport class for DataCatalog.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "datacatalog.googleapis.com" + def __init__( self, *, - host: str = 
"datacatalog.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -70,7 +83,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -84,29 +97,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -126,7 +186,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -140,7 +201,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -163,7 +225,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -177,7 +240,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -191,7 +255,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -211,7 +276,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -228,7 +294,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + 
core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -257,7 +324,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -277,7 +345,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -291,7 +360,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -314,11 +384,11 @@ def _prep_wrapped_messages(self, client_info): @property def search_catalog( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.SearchCatalogRequest], - typing.Union[ + Union[ datacatalog.SearchCatalogResponse, - typing.Awaitable[datacatalog.SearchCatalogResponse], + Awaitable[datacatalog.SearchCatalogResponse], ], ]: raise NotImplementedError() @@ -326,47 +396,47 @@ def search_catalog( @property def create_entry_group( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.CreateEntryGroupRequest], - typing.Union[datacatalog.EntryGroup, typing.Awaitable[datacatalog.EntryGroup]], + Union[datacatalog.EntryGroup, Awaitable[datacatalog.EntryGroup]], ]: raise NotImplementedError() @property def update_entry_group( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.UpdateEntryGroupRequest], - typing.Union[datacatalog.EntryGroup, typing.Awaitable[datacatalog.EntryGroup]], + Union[datacatalog.EntryGroup, Awaitable[datacatalog.EntryGroup]], ]: raise NotImplementedError() @property 
def get_entry_group( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.GetEntryGroupRequest], - typing.Union[datacatalog.EntryGroup, typing.Awaitable[datacatalog.EntryGroup]], + Union[datacatalog.EntryGroup, Awaitable[datacatalog.EntryGroup]], ]: raise NotImplementedError() @property def delete_entry_group( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.DeleteEntryGroupRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_entry_groups( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.ListEntryGroupsRequest], - typing.Union[ + Union[ datacatalog.ListEntryGroupsResponse, - typing.Awaitable[datacatalog.ListEntryGroupsResponse], + Awaitable[datacatalog.ListEntryGroupsResponse], ], ]: raise NotImplementedError() @@ -374,56 +444,55 @@ def list_entry_groups( @property def create_entry( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.CreateEntryRequest], - typing.Union[datacatalog.Entry, typing.Awaitable[datacatalog.Entry]], + Union[datacatalog.Entry, Awaitable[datacatalog.Entry]], ]: raise NotImplementedError() @property def update_entry( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.UpdateEntryRequest], - typing.Union[datacatalog.Entry, typing.Awaitable[datacatalog.Entry]], + Union[datacatalog.Entry, Awaitable[datacatalog.Entry]], ]: raise NotImplementedError() @property def delete_entry( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.DeleteEntryRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def get_entry( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.GetEntryRequest], - typing.Union[datacatalog.Entry, typing.Awaitable[datacatalog.Entry]], + Union[datacatalog.Entry, Awaitable[datacatalog.Entry]], ]: raise NotImplementedError() @property def 
lookup_entry( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.LookupEntryRequest], - typing.Union[datacatalog.Entry, typing.Awaitable[datacatalog.Entry]], + Union[datacatalog.Entry, Awaitable[datacatalog.Entry]], ]: raise NotImplementedError() @property def list_entries( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.ListEntriesRequest], - typing.Union[ - datacatalog.ListEntriesResponse, - typing.Awaitable[datacatalog.ListEntriesResponse], + Union[ + datacatalog.ListEntriesResponse, Awaitable[datacatalog.ListEntriesResponse] ], ]: raise NotImplementedError() @@ -431,139 +500,131 @@ def list_entries( @property def create_tag_template( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.CreateTagTemplateRequest], - typing.Union[tags.TagTemplate, typing.Awaitable[tags.TagTemplate]], + Union[tags.TagTemplate, Awaitable[tags.TagTemplate]], ]: raise NotImplementedError() @property def get_tag_template( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.GetTagTemplateRequest], - typing.Union[tags.TagTemplate, typing.Awaitable[tags.TagTemplate]], + Union[tags.TagTemplate, Awaitable[tags.TagTemplate]], ]: raise NotImplementedError() @property def update_tag_template( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.UpdateTagTemplateRequest], - typing.Union[tags.TagTemplate, typing.Awaitable[tags.TagTemplate]], + Union[tags.TagTemplate, Awaitable[tags.TagTemplate]], ]: raise NotImplementedError() @property def delete_tag_template( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.DeleteTagTemplateRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def create_tag_template_field( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.CreateTagTemplateFieldRequest], - typing.Union[tags.TagTemplateField, typing.Awaitable[tags.TagTemplateField]], + Union[tags.TagTemplateField, 
Awaitable[tags.TagTemplateField]], ]: raise NotImplementedError() @property def update_tag_template_field( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.UpdateTagTemplateFieldRequest], - typing.Union[tags.TagTemplateField, typing.Awaitable[tags.TagTemplateField]], + Union[tags.TagTemplateField, Awaitable[tags.TagTemplateField]], ]: raise NotImplementedError() @property def rename_tag_template_field( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.RenameTagTemplateFieldRequest], - typing.Union[tags.TagTemplateField, typing.Awaitable[tags.TagTemplateField]], + Union[tags.TagTemplateField, Awaitable[tags.TagTemplateField]], ]: raise NotImplementedError() @property def delete_tag_template_field( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.DeleteTagTemplateFieldRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def create_tag( self, - ) -> typing.Callable[ - [datacatalog.CreateTagRequest], - typing.Union[tags.Tag, typing.Awaitable[tags.Tag]], - ]: + ) -> Callable[[datacatalog.CreateTagRequest], Union[tags.Tag, Awaitable[tags.Tag]]]: raise NotImplementedError() @property def update_tag( self, - ) -> typing.Callable[ - [datacatalog.UpdateTagRequest], - typing.Union[tags.Tag, typing.Awaitable[tags.Tag]], - ]: + ) -> Callable[[datacatalog.UpdateTagRequest], Union[tags.Tag, Awaitable[tags.Tag]]]: raise NotImplementedError() @property def delete_tag( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.DeleteTagRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_tags( self, - ) -> typing.Callable[ + ) -> Callable[ [datacatalog.ListTagsRequest], - typing.Union[ - datacatalog.ListTagsResponse, typing.Awaitable[datacatalog.ListTagsResponse] - ], + Union[datacatalog.ListTagsResponse, 
Awaitable[datacatalog.ListTagsResponse]], ]: raise NotImplementedError() @property def set_iam_policy( self, - ) -> typing.Callable[ - [iam_policy.SetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @property def get_iam_policy( self, - ) -> typing.Callable[ - [iam_policy.GetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @property def test_iam_permissions( self, - ) -> typing.Callable[ - [iam_policy.TestIamPermissionsRequest], - typing.Union[ - iam_policy.TestIamPermissionsResponse, - typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ], ]: raise NotImplementedError() diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py index 7fdff619..e5be7e9c 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.datacatalog_v1beta1.types import datacatalog from google.cloud.datacatalog_v1beta1.types import tags -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO @@ -55,7 +52,7 @@ def __init__( self, *, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -69,7 +66,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -179,7 +177,7 @@ def __init__( def create_channel( cls, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -210,13 +208,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -362,7 +362,7 @@ def get_entry_group( @property def delete_entry_group( self, - ) -> Callable[[datacatalog.DeleteEntryGroupRequest], empty.Empty]: + ) -> Callable[[datacatalog.DeleteEntryGroupRequest], empty_pb2.Empty]: r"""Return a callable for the delete entry group method over gRPC. Deletes an EntryGroup. Only entry groups that do not contain @@ -386,7 +386,7 @@ def delete_entry_group( self._stubs["delete_entry_group"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntryGroup", request_serializer=datacatalog.DeleteEntryGroupRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_entry_group"] @@ -484,7 +484,9 @@ def update_entry( return self._stubs["update_entry"] @property - def delete_entry(self) -> Callable[[datacatalog.DeleteEntryRequest], empty.Empty]: + def delete_entry( + self, + ) -> Callable[[datacatalog.DeleteEntryRequest], empty_pb2.Empty]: r"""Return a callable for the delete entry method over gRPC. Deletes an existing entry. 
Only entries created through @@ -509,7 +511,7 @@ def delete_entry(self) -> Callable[[datacatalog.DeleteEntryRequest], empty.Empty self._stubs["delete_entry"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntry", request_serializer=datacatalog.DeleteEntryRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_entry"] @@ -685,7 +687,7 @@ def update_tag_template( @property def delete_tag_template( self, - ) -> Callable[[datacatalog.DeleteTagTemplateRequest], empty.Empty]: + ) -> Callable[[datacatalog.DeleteTagTemplateRequest], empty_pb2.Empty]: r"""Return a callable for the delete tag template method over gRPC. Deletes a tag template and all tags using the template. Users @@ -708,7 +710,7 @@ def delete_tag_template( self._stubs["delete_tag_template"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTagTemplate", request_serializer=datacatalog.DeleteTagTemplateRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_tag_template"] @@ -806,7 +808,7 @@ def rename_tag_template_field( @property def delete_tag_template_field( self, - ) -> Callable[[datacatalog.DeleteTagTemplateFieldRequest], empty.Empty]: + ) -> Callable[[datacatalog.DeleteTagTemplateFieldRequest], empty_pb2.Empty]: r"""Return a callable for the delete tag template field method over gRPC. Deletes a field in a tag template and all uses of that field. 
@@ -830,7 +832,7 @@ def delete_tag_template_field( self._stubs["delete_tag_template_field"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTagTemplateField", request_serializer=datacatalog.DeleteTagTemplateFieldRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_tag_template_field"] @@ -889,7 +891,7 @@ def update_tag(self) -> Callable[[datacatalog.UpdateTagRequest], tags.Tag]: return self._stubs["update_tag"] @property - def delete_tag(self) -> Callable[[datacatalog.DeleteTagRequest], empty.Empty]: + def delete_tag(self) -> Callable[[datacatalog.DeleteTagRequest], empty_pb2.Empty]: r"""Return a callable for the delete tag method over gRPC. Deletes a tag. @@ -908,7 +910,7 @@ def delete_tag(self) -> Callable[[datacatalog.DeleteTagRequest], empty.Empty]: self._stubs["delete_tag"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTag", request_serializer=datacatalog.DeleteTagRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_tag"] @@ -942,7 +944,7 @@ def list_tags( @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the set iam policy method over gRPC. Sets the access control policy for a resource. 
Replaces any @@ -976,15 +978,15 @@ def set_iam_policy( if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the get iam policy method over gRPC. Gets the access control policy for a resource. A ``NOT_FOUND`` @@ -1022,8 +1024,8 @@ def get_iam_policy( if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @@ -1031,7 +1033,8 @@ def get_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, ]: r"""Return a callable for the test iam permissions method over gRPC. 
@@ -1063,8 +1066,8 @@ def test_iam_permissions( if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py index bdf72f4d..3043a110 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,25 +13,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.datacatalog_v1beta1.types import datacatalog from google.cloud.datacatalog_v1beta1.types import tags -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO from .grpc import DataCatalogGrpcTransport @@ -58,7 +55,7 @@ class DataCatalogGrpcAsyncIOTransport(DataCatalogTransport): def create_channel( cls, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -85,13 +82,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -99,7 +98,7 @@ def __init__( self, *, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -113,7 +112,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -171,7 +171,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -372,7 +371,7 @@ def get_entry_group( @property def delete_entry_group( self, - ) -> Callable[[datacatalog.DeleteEntryGroupRequest], Awaitable[empty.Empty]]: + ) -> Callable[[datacatalog.DeleteEntryGroupRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete entry group method over gRPC. Deletes an EntryGroup. 
Only entry groups that do not contain @@ -396,7 +395,7 @@ def delete_entry_group( self._stubs["delete_entry_group"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntryGroup", request_serializer=datacatalog.DeleteEntryGroupRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_entry_group"] @@ -497,7 +496,7 @@ def update_entry( @property def delete_entry( self, - ) -> Callable[[datacatalog.DeleteEntryRequest], Awaitable[empty.Empty]]: + ) -> Callable[[datacatalog.DeleteEntryRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete entry method over gRPC. Deletes an existing entry. Only entries created through @@ -522,7 +521,7 @@ def delete_entry( self._stubs["delete_entry"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntry", request_serializer=datacatalog.DeleteEntryRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_entry"] @@ -702,7 +701,7 @@ def update_tag_template( @property def delete_tag_template( self, - ) -> Callable[[datacatalog.DeleteTagTemplateRequest], Awaitable[empty.Empty]]: + ) -> Callable[[datacatalog.DeleteTagTemplateRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete tag template method over gRPC. Deletes a tag template and all tags using the template. 
Users @@ -725,7 +724,7 @@ def delete_tag_template( self._stubs["delete_tag_template"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTagTemplate", request_serializer=datacatalog.DeleteTagTemplateRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_tag_template"] @@ -829,7 +828,9 @@ def rename_tag_template_field( @property def delete_tag_template_field( self, - ) -> Callable[[datacatalog.DeleteTagTemplateFieldRequest], Awaitable[empty.Empty]]: + ) -> Callable[ + [datacatalog.DeleteTagTemplateFieldRequest], Awaitable[empty_pb2.Empty] + ]: r"""Return a callable for the delete tag template field method over gRPC. Deletes a field in a tag template and all uses of that field. @@ -853,7 +854,7 @@ def delete_tag_template_field( self._stubs["delete_tag_template_field"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTagTemplateField", request_serializer=datacatalog.DeleteTagTemplateFieldRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_tag_template_field"] @@ -918,7 +919,7 @@ def update_tag( @property def delete_tag( self, - ) -> Callable[[datacatalog.DeleteTagRequest], Awaitable[empty.Empty]]: + ) -> Callable[[datacatalog.DeleteTagRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete tag method over gRPC. Deletes a tag. 
@@ -937,7 +938,7 @@ def delete_tag( self._stubs["delete_tag"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTag", request_serializer=datacatalog.DeleteTagRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_tag"] @@ -973,7 +974,7 @@ def list_tags( @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the set iam policy method over gRPC. Sets the access control policy for a resource. Replaces any @@ -1007,15 +1008,15 @@ def set_iam_policy( if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the get iam policy method over gRPC. Gets the access control policy for a resource. 
A ``NOT_FOUND`` @@ -1053,8 +1054,8 @@ def get_iam_policy( if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @@ -1062,8 +1063,8 @@ def get_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], - Awaitable[iam_policy.TestIamPermissionsResponse], + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ]: r"""Return a callable for the test iam permissions method over gRPC. @@ -1095,8 +1096,8 @@ def test_iam_permissions( if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py index 8abc6009..031ae9fa 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific 
language governing permissions and # limitations under the License. # - from .client import PolicyTagManagerClient from .async_client import PolicyTagManagerAsyncClient diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py index 7f0cbecc..a39bcfac 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -22,17 +20,16 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import pagers from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport from .client import PolicyTagManagerClient @@ -52,31 +49,26 @@ 
class PolicyTagManagerAsyncClient: parse_policy_tag_path = staticmethod(PolicyTagManagerClient.parse_policy_tag_path) taxonomy_path = staticmethod(PolicyTagManagerClient.taxonomy_path) parse_taxonomy_path = staticmethod(PolicyTagManagerClient.parse_taxonomy_path) - common_billing_account_path = staticmethod( PolicyTagManagerClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( PolicyTagManagerClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(PolicyTagManagerClient.common_folder_path) parse_common_folder_path = staticmethod( PolicyTagManagerClient.parse_common_folder_path ) - common_organization_path = staticmethod( PolicyTagManagerClient.common_organization_path ) parse_common_organization_path = staticmethod( PolicyTagManagerClient.parse_common_organization_path ) - common_project_path = staticmethod(PolicyTagManagerClient.common_project_path) parse_common_project_path = staticmethod( PolicyTagManagerClient.parse_common_project_path ) - common_location_path = staticmethod(PolicyTagManagerClient.common_location_path) parse_common_location_path = staticmethod( PolicyTagManagerClient.parse_common_location_path @@ -84,7 +76,8 @@ class PolicyTagManagerAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -99,7 +92,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -116,7 +109,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> PolicyTagManagerTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: PolicyTagManagerTransport: The transport used by the client instance. @@ -130,12 +123,12 @@ def transport(self) -> PolicyTagManagerTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, PolicyTagManagerTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the policy tag manager client. + """Instantiates the policy tag manager client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -167,7 +160,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = PolicyTagManagerClient( credentials=credentials, transport=transport, @@ -204,7 +196,6 @@ async def create_taxonomy( This corresponds to the ``taxonomy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -235,7 +226,6 @@ async def create_taxonomy( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if taxonomy is not None: @@ -286,7 +276,6 @@ async def delete_taxonomy( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -307,7 +296,6 @@ async def delete_taxonomy( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -352,7 +340,6 @@ async def update_taxonomy( This corresponds to the ``taxonomy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -383,7 +370,6 @@ async def update_taxonomy( # If we have keyword arguments corresponding to fields on the # request, apply these. - if taxonomy is not None: request.taxonomy = taxonomy @@ -432,7 +418,6 @@ async def list_taxonomies( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -462,7 +447,6 @@ async def list_taxonomies( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -514,7 +498,6 @@ async def get_taxonomy( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -545,7 +528,6 @@ async def get_taxonomy( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -598,7 +580,6 @@ async def create_policy_tag( This corresponds to the ``policy_tag`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -631,7 +612,6 @@ async def create_policy_tag( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if policy_tag is not None: @@ -681,7 +661,6 @@ async def delete_policy_tag( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -702,7 +681,6 @@ async def delete_policy_tag( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -748,7 +726,6 @@ async def update_policy_tag( This corresponds to the ``policy_tag`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -781,7 +758,6 @@ async def update_policy_tag( # If we have keyword arguments corresponding to fields on the # request, apply these. - if policy_tag is not None: request.policy_tag = policy_tag @@ -829,7 +805,6 @@ async def list_policy_tags( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -859,7 +834,6 @@ async def list_policy_tags( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -911,7 +885,6 @@ async def get_policy_tag( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -944,7 +917,6 @@ async def get_policy_tag( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -970,19 +942,18 @@ async def get_policy_tag( async def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the IAM policy for a taxonomy or a policy tag. Args: request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1049,11 +1020,10 @@ async def get_iam_policy( """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.GetIamPolicyRequest(**request) + request = iam_policy_pb2.GetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1077,19 +1047,18 @@ async def get_iam_policy( async def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Sets the IAM policy for a taxonomy or a policy tag. Args: request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1156,11 +1125,10 @@ async def set_iam_policy( """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.SetIamPolicyRequest(**request) + request = iam_policy_pb2.SetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1184,12 +1152,12 @@ async def set_iam_policy( async def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Returns the permissions that a caller has on the specified taxonomy or policy tag. @@ -1197,7 +1165,6 @@ async def test_iam_permissions( request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1209,11 +1176,10 @@ async def test_iam_permissions( Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py index 152d0a10..de1c2893 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -34,9 +32,8 @@ from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import pagers from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO from .transports.grpc import PolicyTagManagerGrpcTransport from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport @@ -57,7 +54,7 @@ 
class PolicyTagManagerClientMeta(type): _transport_registry["grpc_asyncio"] = PolicyTagManagerGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[PolicyTagManagerTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -82,7 +79,8 @@ class PolicyTagManagerClient(metaclass=PolicyTagManagerClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -116,7 +114,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -133,7 +132,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -152,10 +151,11 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> PolicyTagManagerTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - PolicyTagManagerTransport: The transport used by the client instance. + PolicyTagManagerTransport: The transport used by the client + instance. 
""" return self._transport @@ -163,7 +163,7 @@ def transport(self) -> PolicyTagManagerTransport: def policy_tag_path( project: str, location: str, taxonomy: str, policy_tag: str, ) -> str: - """Return a fully-qualified policy_tag string.""" + """Returns a fully-qualified policy_tag string.""" return "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format( project=project, location=location, @@ -173,7 +173,7 @@ def policy_tag_path( @staticmethod def parse_policy_tag_path(path: str) -> Dict[str, str]: - """Parse a policy_tag path into its component segments.""" + """Parses a policy_tag path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)/policyTags/(?P.+?)$", path, @@ -182,14 +182,14 @@ def parse_policy_tag_path(path: str) -> Dict[str, str]: @staticmethod def taxonomy_path(project: str, location: str, taxonomy: str,) -> str: - """Return a fully-qualified taxonomy string.""" + """Returns a fully-qualified taxonomy string.""" return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( project=project, location=location, taxonomy=taxonomy, ) @staticmethod def parse_taxonomy_path(path: str) -> Dict[str, str]: - """Parse a taxonomy path into its component segments.""" + """Parses a taxonomy path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)$", path, @@ -198,7 +198,7 @@ def parse_taxonomy_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -211,7 +211,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a 
fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -222,7 +222,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -233,7 +233,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -244,7 +244,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -258,12 +258,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, PolicyTagManagerTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the policy tag manager client. + """Instantiates the policy tag manager client. 
Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -318,9 +318,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -332,12 +333,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -352,8 +355,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -397,7 +400,6 @@ def create_taxonomy( This corresponds to the ``taxonomy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -430,10 +432,8 @@ def create_taxonomy( # there are no flattened fields. if not isinstance(request, policytagmanager.CreateTaxonomyRequest): request = policytagmanager.CreateTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent if taxonomy is not None: @@ -480,7 +480,6 @@ def delete_taxonomy( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -503,10 +502,8 @@ def delete_taxonomy( # there are no flattened fields. if not isinstance(request, policytagmanager.DeleteTaxonomyRequest): request = policytagmanager.DeleteTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -547,7 +544,6 @@ def update_taxonomy( This corresponds to the ``taxonomy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -580,10 +576,8 @@ def update_taxonomy( # there are no flattened fields. if not isinstance(request, policytagmanager.UpdateTaxonomyRequest): request = policytagmanager.UpdateTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if taxonomy is not None: request.taxonomy = taxonomy @@ -628,7 +622,6 @@ def list_taxonomies( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -660,10 +653,8 @@ def list_taxonomies( # there are no flattened fields. if not isinstance(request, policytagmanager.ListTaxonomiesRequest): request = policytagmanager.ListTaxonomiesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -711,7 +702,6 @@ def get_taxonomy( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -744,10 +734,8 @@ def get_taxonomy( # there are no flattened fields. if not isinstance(request, policytagmanager.GetTaxonomyRequest): request = policytagmanager.GetTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -796,7 +784,6 @@ def create_policy_tag( This corresponds to the ``policy_tag`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -831,10 +818,8 @@ def create_policy_tag( # there are no flattened fields. if not isinstance(request, policytagmanager.CreatePolicyTagRequest): request = policytagmanager.CreatePolicyTagRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if policy_tag is not None: @@ -880,7 +865,6 @@ def delete_policy_tag( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -903,10 +887,8 @@ def delete_policy_tag( # there are no flattened fields. if not isinstance(request, policytagmanager.DeletePolicyTagRequest): request = policytagmanager.DeletePolicyTagRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name @@ -948,7 +930,6 @@ def update_policy_tag( This corresponds to the ``policy_tag`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -983,10 +964,8 @@ def update_policy_tag( # there are no flattened fields. if not isinstance(request, policytagmanager.UpdatePolicyTagRequest): request = policytagmanager.UpdatePolicyTagRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if policy_tag is not None: request.policy_tag = policy_tag @@ -1030,7 +1009,6 @@ def list_policy_tags( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1062,10 +1040,8 @@ def list_policy_tags( # there are no flattened fields. if not isinstance(request, policytagmanager.ListPolicyTagsRequest): request = policytagmanager.ListPolicyTagsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1113,7 +1089,6 @@ def get_policy_tag( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1148,10 +1123,8 @@ def get_policy_tag( # there are no flattened fields. if not isinstance(request, policytagmanager.GetPolicyTagRequest): request = policytagmanager.GetPolicyTagRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name @@ -1173,19 +1146,18 @@ def get_policy_tag( def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the IAM policy for a taxonomy or a policy tag. Args: request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1252,14 +1224,13 @@ def get_iam_policy( """ # Create or coerce a protobuf request object. - if isinstance(request, dict): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - request = iam_policy.GetIamPolicyRequest(**request) + request = iam_policy_pb2.GetIamPolicyRequest(**request) elif not request: # Null request, just make one. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1279,19 +1250,18 @@ def get_iam_policy( def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Sets the IAM policy for a taxonomy or a policy tag. Args: request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): The request object. Request message for `SetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -1358,14 +1328,13 @@ def set_iam_policy( """ # Create or coerce a protobuf request object. - if isinstance(request, dict): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - request = iam_policy.SetIamPolicyRequest(**request) + request = iam_policy_pb2.SetIamPolicyRequest(**request) elif not request: # Null request, just make one. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1385,12 +1354,12 @@ def set_iam_policy( def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Returns the permissions that a caller has on the specified taxonomy or policy tag. @@ -1398,7 +1367,6 @@ def test_iam_permissions( request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1410,14 +1378,13 @@ def test_iam_permissions( Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. - if isinstance(request, dict): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) elif not request: # Null request, just make one. 
- request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py index 7253f781..10e51df0 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -245,7 +243,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py index 95f18c5c..a527c57d 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py index 1a3c9921..6caf2360 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -40,27 +39,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 
1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class PolicyTagManagerTransport(abc.ABC): """Abstract transport class for PolicyTagManager.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "datacatalog.googleapis.com" + def __init__( self, *, - host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -69,7 +82,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -83,29 +96,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -155,42 +215,38 @@ def _prep_wrapped_messages(self, client_info): @property def create_taxonomy( self, - ) -> typing.Callable[ + ) -> Callable[ [policytagmanager.CreateTaxonomyRequest], - typing.Union[ - policytagmanager.Taxonomy, typing.Awaitable[policytagmanager.Taxonomy] - ], + Union[policytagmanager.Taxonomy, Awaitable[policytagmanager.Taxonomy]], ]: raise NotImplementedError() @property def delete_taxonomy( self, - ) -> typing.Callable[ + ) -> Callable[ [policytagmanager.DeleteTaxonomyRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def update_taxonomy( self, - ) -> typing.Callable[ + ) -> Callable[ [policytagmanager.UpdateTaxonomyRequest], - typing.Union[ - policytagmanager.Taxonomy, typing.Awaitable[policytagmanager.Taxonomy] - ], + Union[policytagmanager.Taxonomy, Awaitable[policytagmanager.Taxonomy]], ]: raise NotImplementedError() @property def list_taxonomies( self, - ) -> typing.Callable[ + ) -> Callable[ [policytagmanager.ListTaxonomiesRequest], - typing.Union[ + Union[ policytagmanager.ListTaxonomiesResponse, - typing.Awaitable[policytagmanager.ListTaxonomiesResponse], + Awaitable[policytagmanager.ListTaxonomiesResponse], ], ]: raise NotImplementedError() @@ -198,53 +254,47 @@ def list_taxonomies( @property def get_taxonomy( self, - ) -> typing.Callable[ + ) -> Callable[ [policytagmanager.GetTaxonomyRequest], - typing.Union[ - policytagmanager.Taxonomy, typing.Awaitable[policytagmanager.Taxonomy] - ], + Union[policytagmanager.Taxonomy, Awaitable[policytagmanager.Taxonomy]], ]: raise NotImplementedError() @property def create_policy_tag( self, - ) -> typing.Callable[ + ) -> Callable[ [policytagmanager.CreatePolicyTagRequest], - typing.Union[ - policytagmanager.PolicyTag, typing.Awaitable[policytagmanager.PolicyTag] - ], + Union[policytagmanager.PolicyTag, Awaitable[policytagmanager.PolicyTag]], ]: raise 
NotImplementedError() @property def delete_policy_tag( self, - ) -> typing.Callable[ + ) -> Callable[ [policytagmanager.DeletePolicyTagRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def update_policy_tag( self, - ) -> typing.Callable[ + ) -> Callable[ [policytagmanager.UpdatePolicyTagRequest], - typing.Union[ - policytagmanager.PolicyTag, typing.Awaitable[policytagmanager.PolicyTag] - ], + Union[policytagmanager.PolicyTag, Awaitable[policytagmanager.PolicyTag]], ]: raise NotImplementedError() @property def list_policy_tags( self, - ) -> typing.Callable[ + ) -> Callable[ [policytagmanager.ListPolicyTagsRequest], - typing.Union[ + Union[ policytagmanager.ListPolicyTagsResponse, - typing.Awaitable[policytagmanager.ListPolicyTagsResponse], + Awaitable[policytagmanager.ListPolicyTagsResponse], ], ]: raise NotImplementedError() @@ -252,40 +302,38 @@ def list_policy_tags( @property def get_policy_tag( self, - ) -> typing.Callable[ + ) -> Callable[ [policytagmanager.GetPolicyTagRequest], - typing.Union[ - policytagmanager.PolicyTag, typing.Awaitable[policytagmanager.PolicyTag] - ], + Union[policytagmanager.PolicyTag, Awaitable[policytagmanager.PolicyTag]], ]: raise NotImplementedError() @property def get_iam_policy( self, - ) -> typing.Callable[ - [iam_policy.GetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @property def set_iam_policy( self, - ) -> typing.Callable[ - [iam_policy.SetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @property def test_iam_permissions( self, - ) -> typing.Callable[ - 
[iam_policy.TestIamPermissionsRequest], - typing.Union[ - iam_policy.TestIamPermissionsResponse, - typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ], ]: raise NotImplementedError() diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py index 2bb4e178..ca840224 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO @@ -54,7 +51,7 @@ def __init__( self, *, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -68,7 +65,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -178,7 +176,7 @@ def __init__( def create_channel( cls, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -209,13 +207,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -254,7 +254,7 @@ def create_taxonomy( @property def delete_taxonomy( self, - ) -> Callable[[policytagmanager.DeleteTaxonomyRequest], empty.Empty]: + ) -> Callable[[policytagmanager.DeleteTaxonomyRequest], empty_pb2.Empty]: r"""Return a callable for the delete taxonomy method over gRPC. Deletes a taxonomy. This operation will also delete @@ -275,7 +275,7 @@ def delete_taxonomy( self._stubs["delete_taxonomy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/DeleteTaxonomy", request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_taxonomy"] @@ -392,7 +392,7 @@ def create_policy_tag( @property def delete_policy_tag( self, - ) -> Callable[[policytagmanager.DeletePolicyTagRequest], empty.Empty]: + ) -> Callable[[policytagmanager.DeletePolicyTagRequest], empty_pb2.Empty]: r"""Return a callable for the delete policy tag method over gRPC. Deletes a policy tag. 
Also deletes all of its @@ -412,7 +412,7 @@ def delete_policy_tag( self._stubs["delete_policy_tag"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/DeletePolicyTag", request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_policy_tag"] @@ -502,7 +502,7 @@ def get_policy_tag( @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM policy for a taxonomy or a policy tag. @@ -520,15 +520,15 @@ def get_iam_policy( if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM policy for a taxonomy or a policy tag. 
@@ -546,8 +546,8 @@ def set_iam_policy( if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @@ -555,7 +555,8 @@ def set_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, ]: r"""Return a callable for the test iam permissions method over gRPC. @@ -575,8 +576,8 @@ def test_iam_permissions( if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py index 9c2dfa26..6ec735c6 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,22 @@ # See the 
License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO from .grpc import PolicyTagManagerGrpcTransport @@ -57,7 +54,7 @@ class PolicyTagManagerGrpcAsyncIOTransport(PolicyTagManagerTransport): def create_channel( cls, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -84,13 +81,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -98,7 +97,7 @@ def __init__( self, *, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -112,7 +111,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -170,7 +170,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -260,7 +259,7 @@ def create_taxonomy( @property def delete_taxonomy( self, - ) -> Callable[[policytagmanager.DeleteTaxonomyRequest], Awaitable[empty.Empty]]: + ) -> Callable[[policytagmanager.DeleteTaxonomyRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete taxonomy method over gRPC. Deletes a taxonomy. 
This operation will also delete @@ -281,7 +280,7 @@ def delete_taxonomy( self._stubs["delete_taxonomy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/DeleteTaxonomy", request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_taxonomy"] @@ -402,7 +401,9 @@ def create_policy_tag( @property def delete_policy_tag( self, - ) -> Callable[[policytagmanager.DeletePolicyTagRequest], Awaitable[empty.Empty]]: + ) -> Callable[ + [policytagmanager.DeletePolicyTagRequest], Awaitable[empty_pb2.Empty] + ]: r"""Return a callable for the delete policy tag method over gRPC. Deletes a policy tag. Also deletes all of its @@ -422,7 +423,7 @@ def delete_policy_tag( self._stubs["delete_policy_tag"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/DeletePolicyTag", request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_policy_tag"] @@ -514,7 +515,7 @@ def get_policy_tag( @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM policy for a taxonomy or a policy tag. 
@@ -532,15 +533,15 @@ def get_iam_policy( if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM policy for a taxonomy or a policy tag. @@ -558,8 +559,8 @@ def set_iam_policy( if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @@ -567,8 +568,8 @@ def set_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], - Awaitable[iam_policy.TestIamPermissionsResponse], + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ]: r"""Return a callable for the test iam permissions method over gRPC. 
@@ -588,8 +589,8 @@ def test_iam_permissions( if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.cloud.datacatalog.v1beta1.PolicyTagManager/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py index 16fecda2..51e547a2 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .client import PolicyTagManagerSerializationClient from .async_client import PolicyTagManagerSerializationAsyncClient diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py index 40eda2b7..d1a5a6cb 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -22,15 +20,14 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.datacatalog_v1beta1.types import policytagmanager from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization - from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport from .client import PolicyTagManagerSerializationClient @@ -51,35 +48,30 @@ class PolicyTagManagerSerializationAsyncClient: parse_taxonomy_path = staticmethod( PolicyTagManagerSerializationClient.parse_taxonomy_path ) - common_billing_account_path = staticmethod( 
PolicyTagManagerSerializationClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( PolicyTagManagerSerializationClient.parse_common_billing_account_path ) - common_folder_path = staticmethod( PolicyTagManagerSerializationClient.common_folder_path ) parse_common_folder_path = staticmethod( PolicyTagManagerSerializationClient.parse_common_folder_path ) - common_organization_path = staticmethod( PolicyTagManagerSerializationClient.common_organization_path ) parse_common_organization_path = staticmethod( PolicyTagManagerSerializationClient.parse_common_organization_path ) - common_project_path = staticmethod( PolicyTagManagerSerializationClient.common_project_path ) parse_common_project_path = staticmethod( PolicyTagManagerSerializationClient.parse_common_project_path ) - common_location_path = staticmethod( PolicyTagManagerSerializationClient.common_location_path ) @@ -89,7 +81,8 @@ class PolicyTagManagerSerializationAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -104,7 +97,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -121,7 +114,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> PolicyTagManagerSerializationTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: PolicyTagManagerSerializationTransport: The transport used by the client instance. 
@@ -136,12 +129,12 @@ def transport(self) -> PolicyTagManagerSerializationTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, PolicyTagManagerSerializationTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the policy tag manager serialization client. + """Instantiates the policy tag manager serialization client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -173,7 +166,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = PolicyTagManagerSerializationClient( credentials=credentials, transport=transport, @@ -199,7 +191,6 @@ async def import_taxonomies( request (:class:`google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesRequest`): The request object. Request message for [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -213,7 +204,6 @@ async def import_taxonomies( """ # Create or coerce a protobuf request object. - request = policytagmanagerserialization.ImportTaxonomiesRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -254,7 +244,6 @@ async def export_taxonomies( request (:class:`google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesRequest`): The request object. Request message for [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -268,7 +257,6 @@ async def export_taxonomies( """ # Create or coerce a protobuf request object. - request = policytagmanagerserialization.ExportTaxonomiesRequest(request) # Wrap the RPC method; this adds retry and timeout information, diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py index 59221c65..db07f881 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -34,7 +32,6 @@ from google.cloud.datacatalog_v1beta1.types import policytagmanager from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization - from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO from .transports.grpc import PolicyTagManagerSerializationGrpcTransport from 
.transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport @@ -59,7 +56,7 @@ class PolicyTagManagerSerializationClientMeta(type): def get_transport_class( cls, label: str = None, ) -> Type[PolicyTagManagerSerializationTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -87,7 +84,8 @@ class PolicyTagManagerSerializationClient( @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -121,7 +119,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -138,7 +137,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -157,23 +156,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> PolicyTagManagerSerializationTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - PolicyTagManagerSerializationTransport: The transport used by the client instance. + PolicyTagManagerSerializationTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def taxonomy_path(project: str, location: str, taxonomy: str,) -> str: - """Return a fully-qualified taxonomy string.""" + """Returns a fully-qualified taxonomy string.""" return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( project=project, location=location, taxonomy=taxonomy, ) @staticmethod def parse_taxonomy_path(path: str) -> Dict[str, str]: - """Parse a taxonomy path into its component segments.""" + """Parses a taxonomy path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)$", path, @@ -182,7 +182,7 @@ def parse_taxonomy_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -195,7 +195,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -206,7 +206,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -217,7 +217,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -228,7 +228,7 @@ def parse_common_project_path(path: str) -> 
Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -242,12 +242,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, PolicyTagManagerSerializationTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the policy tag manager serialization client. + """Instantiates the policy tag manager serialization client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -302,9 +302,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -316,12 +317,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. 
@@ -336,8 +339,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -370,7 +373,6 @@ def import_taxonomies( request (google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesRequest): The request object. Request message for [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -384,7 +386,6 @@ def import_taxonomies( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a policytagmanagerserialization.ImportTaxonomiesRequest. # There's no risk of modifying the input as we've already verified @@ -428,7 +429,6 @@ def export_taxonomies( request (google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesRequest): The request object. Request message for [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -442,7 +442,6 @@ def export_taxonomies( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a policytagmanagerserialization.ExportTaxonomiesRequest. 
# There's no risk of modifying the input as we've already verified diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py index 1e108bd2..a198169c 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py index f948da12..b1b0ffaf 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization - try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( @@ -37,27 +36,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class PolicyTagManagerSerializationTransport(abc.ABC): """Abstract transport class for PolicyTagManagerSerialization.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "datacatalog.googleapis.com" + def __init__( self, *, - host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: 
Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -66,7 +79,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -80,29 +93,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. 
self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -117,11 +177,11 @@ def _prep_wrapped_messages(self, client_info): @property def import_taxonomies( self, - ) -> typing.Callable[ + ) -> Callable[ [policytagmanagerserialization.ImportTaxonomiesRequest], - typing.Union[ + Union[ policytagmanagerserialization.ImportTaxonomiesResponse, - typing.Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse], + Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse], ], ]: raise NotImplementedError() @@ -129,11 +189,11 @@ def import_taxonomies( @property def export_taxonomies( self, - ) -> typing.Callable[ + ) -> Callable[ [policytagmanagerserialization.ExportTaxonomiesRequest], - typing.Union[ + Union[ policytagmanagerserialization.ExportTaxonomiesResponse, - typing.Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse], + Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse], ], ]: raise NotImplementedError() diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py index 5c3af927..c23df25d 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization - from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO @@ -54,7 +51,7 @@ def __init__( self, *, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -68,7 +65,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -178,7 +176,7 @@ def __init__( def create_channel( cls, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -209,13 +207,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py index f230f78e..836be3e4 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization - from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO from .grpc import PolicyTagManagerSerializationGrpcTransport @@ -57,7 +54,7 @@ class PolicyTagManagerSerializationGrpcAsyncIOTransport( def create_channel( cls, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -84,13 +81,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -98,7 +97,7 @@ def __init__( self, *, host: str = "datacatalog.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -112,7 +111,8 @@ def __init__( """Instantiate the transport. 
Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -170,7 +170,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint diff --git a/google/cloud/datacatalog_v1beta1/types/__init__.py b/google/cloud/datacatalog_v1beta1/types/__init__.py index 55067b1a..54faa0ae 100644 --- a/google/cloud/datacatalog_v1beta1/types/__init__.py +++ b/google/cloud/datacatalog_v1beta1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .datacatalog import ( CreateEntryGroupRequest, CreateEntryRequest, diff --git a/google/cloud/datacatalog_v1beta1/types/common.py b/google/cloud/datacatalog_v1beta1/types/common.py index 73167f1e..9b90cb8c 100644 --- a/google/cloud/datacatalog_v1beta1/types/common.py +++ b/google/cloud/datacatalog_v1beta1/types/common.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore diff --git a/google/cloud/datacatalog_v1beta1/types/datacatalog.py b/google/cloud/datacatalog_v1beta1/types/datacatalog.py index f12ca6e6..f5a6044d 100644 --- a/google/cloud/datacatalog_v1beta1/types/datacatalog.py +++ b/google/cloud/datacatalog_v1beta1/types/datacatalog.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,10 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.datacatalog_v1beta1.types import common from google.cloud.datacatalog_v1beta1.types import ( gcs_fileset_spec as gcd_gcs_fileset_spec, @@ -27,7 +24,7 @@ from google.cloud.datacatalog_v1beta1.types import table_spec from google.cloud.datacatalog_v1beta1.types import tags as gcd_tags from google.cloud.datacatalog_v1beta1.types import timestamps -from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore __protobuf__ = proto.module( @@ -150,21 +147,15 @@ class Scope(proto.Message): By default, GCP public datasets are excluded. 
""" - include_org_ids = proto.RepeatedField(proto.STRING, number=2) - - include_project_ids = proto.RepeatedField(proto.STRING, number=3) - - include_gcp_public_datasets = proto.Field(proto.BOOL, number=7) + include_org_ids = proto.RepeatedField(proto.STRING, number=2,) + include_project_ids = proto.RepeatedField(proto.STRING, number=3,) + include_gcp_public_datasets = proto.Field(proto.BOOL, number=7,) scope = proto.Field(proto.MESSAGE, number=6, message=Scope,) - - query = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - order_by = proto.Field(proto.STRING, number=5) + query = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + order_by = proto.Field(proto.STRING, number=5,) class SearchCatalogResponse(proto.Message): @@ -186,8 +177,7 @@ def raw_page(self): results = proto.RepeatedField( proto.MESSAGE, number=1, message=search.SearchCatalogResult, ) - - next_page_token = proto.Field(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=3,) class CreateEntryGroupRequest(proto.Message): @@ -214,10 +204,8 @@ class CreateEntryGroupRequest(proto.Message): empty entry group. 
""" - parent = proto.Field(proto.STRING, number=1) - - entry_group_id = proto.Field(proto.STRING, number=3) - + parent = proto.Field(proto.STRING, number=1,) + entry_group_id = proto.Field(proto.STRING, number=3,) entry_group = proto.Field(proto.MESSAGE, number=2, message="EntryGroup",) @@ -236,8 +224,9 @@ class UpdateEntryGroupRequest(proto.Message): """ entry_group = proto.Field(proto.MESSAGE, number=1, message="EntryGroup",) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class GetEntryGroupRequest(proto.Message): @@ -253,9 +242,8 @@ class GetEntryGroupRequest(proto.Message): all fields are returned. """ - name = proto.Field(proto.STRING, number=1) - - read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + name = proto.Field(proto.STRING, number=1,) + read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,) class DeleteEntryGroupRequest(proto.Message): @@ -271,9 +259,8 @@ class DeleteEntryGroupRequest(proto.Message): entry group. """ - name = proto.Field(proto.STRING, number=1) - - force = proto.Field(proto.BOOL, number=2) + name = proto.Field(proto.STRING, number=1,) + force = proto.Field(proto.BOOL, number=2,) class ListEntryGroupsRequest(proto.Message): @@ -295,11 +282,9 @@ class ListEntryGroupsRequest(proto.Message): requested. If empty, the first page is returned. 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListEntryGroupsResponse(proto.Message): @@ -320,8 +305,7 @@ def raw_page(self): return self entry_groups = proto.RepeatedField(proto.MESSAGE, number=1, message="EntryGroup",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CreateEntryRequest(proto.Message): @@ -343,10 +327,8 @@ class CreateEntryRequest(proto.Message): Required. The entry to create. """ - parent = proto.Field(proto.STRING, number=1) - - entry_id = proto.Field(proto.STRING, number=3) - + parent = proto.Field(proto.STRING, number=1,) + entry_id = proto.Field(proto.STRING, number=3,) entry = proto.Field(proto.MESSAGE, number=2, message="Entry",) @@ -388,8 +370,9 @@ class UpdateEntryRequest(proto.Message): """ entry = proto.Field(proto.MESSAGE, number=1, message="Entry",) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class DeleteEntryRequest(proto.Message): @@ -403,7 +386,7 @@ class DeleteEntryRequest(proto.Message): - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class GetEntryRequest(proto.Message): @@ -417,7 +400,7 @@ class GetEntryRequest(proto.Message): - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class LookupEntryRequest(proto.Message): @@ -451,9 +434,8 @@ class LookupEntryRequest(proto.Message): 
https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical. """ - linked_resource = proto.Field(proto.STRING, number=1, oneof="target_name") - - sql_resource = proto.Field(proto.STRING, number=3, oneof="target_name") + linked_resource = proto.Field(proto.STRING, number=1, oneof="target_name",) + sql_resource = proto.Field(proto.STRING, number=3, oneof="target_name",) class Entry(proto.Message): @@ -551,47 +533,35 @@ class Entry(proto.Message): an empty timestamp. """ - name = proto.Field(proto.STRING, number=1) - - linked_resource = proto.Field(proto.STRING, number=9) - + name = proto.Field(proto.STRING, number=1,) + linked_resource = proto.Field(proto.STRING, number=9,) type_ = proto.Field(proto.ENUM, number=2, oneof="entry_type", enum="EntryType",) - - user_specified_type = proto.Field(proto.STRING, number=16, oneof="entry_type") - + user_specified_type = proto.Field(proto.STRING, number=16, oneof="entry_type",) integrated_system = proto.Field( proto.ENUM, number=17, oneof="system", enum=common.IntegratedSystem, ) - - user_specified_system = proto.Field(proto.STRING, number=18, oneof="system") - + user_specified_system = proto.Field(proto.STRING, number=18, oneof="system",) gcs_fileset_spec = proto.Field( proto.MESSAGE, number=6, oneof="type_spec", message=gcd_gcs_fileset_spec.GcsFilesetSpec, ) - bigquery_table_spec = proto.Field( proto.MESSAGE, number=12, oneof="type_spec", message=table_spec.BigQueryTableSpec, ) - bigquery_date_sharded_spec = proto.Field( proto.MESSAGE, number=15, oneof="type_spec", message=table_spec.BigQueryDateShardedSpec, ) - - display_name = proto.Field(proto.STRING, number=3) - - description = proto.Field(proto.STRING, number=4) - + display_name = proto.Field(proto.STRING, number=3,) + description = proto.Field(proto.STRING, number=4,) schema = proto.Field(proto.MESSAGE, number=5, message=gcd_schema.Schema,) - source_system_timestamps = proto.Field( proto.MESSAGE, number=7, 
message=timestamps.SystemTimestamps, ) @@ -624,12 +594,9 @@ class EntryGroup(proto.Message): EntryGroup. Default value is empty timestamps. """ - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - description = proto.Field(proto.STRING, number=3) - + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) data_catalog_timestamps = proto.Field( proto.MESSAGE, number=4, message=timestamps.SystemTimestamps, ) @@ -654,10 +621,8 @@ class CreateTagTemplateRequest(proto.Message): Required. The tag template to create. """ - parent = proto.Field(proto.STRING, number=1) - - tag_template_id = proto.Field(proto.STRING, number=3) - + parent = proto.Field(proto.STRING, number=1,) + tag_template_id = proto.Field(proto.STRING, number=3,) tag_template = proto.Field(proto.MESSAGE, number=2, message=gcd_tags.TagTemplate,) @@ -672,7 +637,7 @@ class GetTagTemplateRequest(proto.Message): - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpdateTagTemplateRequest(proto.Message): @@ -696,8 +661,9 @@ class UpdateTagTemplateRequest(proto.Message): """ tag_template = proto.Field(proto.MESSAGE, number=1, message=gcd_tags.TagTemplate,) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class DeleteTagTemplateRequest(proto.Message): @@ -716,9 +682,8 @@ class DeleteTagTemplateRequest(proto.Message): the future. """ - name = proto.Field(proto.STRING, number=1) - - force = proto.Field(proto.BOOL, number=2) + name = proto.Field(proto.STRING, number=1,) + force = proto.Field(proto.BOOL, number=2,) class CreateTagRequest(proto.Message): @@ -738,8 +703,7 @@ class CreateTagRequest(proto.Message): Required. 
The tag to create. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) tag = proto.Field(proto.MESSAGE, number=2, message=gcd_tags.Tag,) @@ -758,8 +722,9 @@ class UpdateTagRequest(proto.Message): """ tag = proto.Field(proto.MESSAGE, number=1, message=gcd_tags.Tag,) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class DeleteTagRequest(proto.Message): @@ -773,7 +738,7 @@ class DeleteTagRequest(proto.Message): - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateTagTemplateFieldRequest(proto.Message): @@ -798,10 +763,8 @@ class CreateTagTemplateFieldRequest(proto.Message): Required. The tag template field to create. """ - parent = proto.Field(proto.STRING, number=1) - - tag_template_field_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + tag_template_field_id = proto.Field(proto.STRING, number=2,) tag_template_field = proto.Field( proto.MESSAGE, number=3, message=gcd_tags.TagTemplateField, ) @@ -836,13 +799,13 @@ class UpdateTagTemplateFieldRequest(proto.Message): is NOT allowed. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) tag_template_field = proto.Field( proto.MESSAGE, number=2, message=gcd_tags.TagTemplateField, ) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) class RenameTagTemplateFieldRequest(proto.Message): @@ -859,9 +822,8 @@ class RenameTagTemplateFieldRequest(proto.Message): example, ``my_new_field``. 
""" - name = proto.Field(proto.STRING, number=1) - - new_tag_template_field_id = proto.Field(proto.STRING, number=2) + name = proto.Field(proto.STRING, number=1,) + new_tag_template_field_id = proto.Field(proto.STRING, number=2,) class DeleteTagTemplateFieldRequest(proto.Message): @@ -881,9 +843,8 @@ class DeleteTagTemplateFieldRequest(proto.Message): in the future. """ - name = proto.Field(proto.STRING, number=1) - - force = proto.Field(proto.BOOL, number=2) + name = proto.Field(proto.STRING, number=1,) + force = proto.Field(proto.BOOL, number=2,) class ListTagsRequest(proto.Message): @@ -909,11 +870,9 @@ class ListTagsRequest(proto.Message): If empty, the first page is returned. """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListTagsResponse(proto.Message): @@ -934,8 +893,7 @@ def raw_page(self): return self tags = proto.RepeatedField(proto.MESSAGE, number=1, message=gcd_tags.Tag,) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class ListEntriesRequest(proto.Message): @@ -962,13 +920,10 @@ class ListEntriesRequest(proto.Message): return a list of Entries with only "name" field. 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - read_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + read_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask,) class ListEntriesResponse(proto.Message): @@ -989,8 +944,7 @@ def raw_page(self): return self entries = proto.RepeatedField(proto.MESSAGE, number=1, message="Entry",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py b/google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py index 68826009..475f9b7b 100644 --- a/google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py +++ b/google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,10 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.datacatalog_v1beta1.types import timestamps @@ -29,7 +26,6 @@ class GcsFilesetSpec(proto.Message): r"""Describes a Cloud Storage fileset entry. - Attributes: file_patterns (Sequence[str]): Required. Patterns to identify a set of files in Google @@ -70,8 +66,7 @@ class GcsFilesetSpec(proto.Message): are represented here. 
""" - file_patterns = proto.RepeatedField(proto.STRING, number=1) - + file_patterns = proto.RepeatedField(proto.STRING, number=1,) sample_gcs_file_specs = proto.RepeatedField( proto.MESSAGE, number=2, message="GcsFileSpec", ) @@ -79,7 +74,6 @@ class GcsFilesetSpec(proto.Message): class GcsFileSpec(proto.Message): r"""Specifications of a single file in Cloud Storage. - Attributes: file_path (str): Required. The full file path. Example: @@ -91,13 +85,11 @@ class GcsFileSpec(proto.Message): Output only. The size of the file, in bytes. """ - file_path = proto.Field(proto.STRING, number=1) - + file_path = proto.Field(proto.STRING, number=1,) gcs_timestamps = proto.Field( proto.MESSAGE, number=2, message=timestamps.SystemTimestamps, ) - - size_bytes = proto.Field(proto.INT64, number=4) + size_bytes = proto.Field(proto.INT64, number=4,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1beta1/types/policytagmanager.py b/google/cloud/datacatalog_v1beta1/types/policytagmanager.py index f3478c90..3b48aa3d 100644 --- a/google/cloud/datacatalog_v1beta1/types/policytagmanager.py +++ b/google/cloud/datacatalog_v1beta1/types/policytagmanager.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore - -from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore __protobuf__ = proto.module( @@ -78,12 +75,9 @@ class PolicyType(proto.Enum): POLICY_TYPE_UNSPECIFIED = 0 FINE_GRAINED_ACCESS_CONTROL = 1 - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - description = proto.Field(proto.STRING, number=3) - + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) activated_policy_types = proto.RepeatedField(proto.ENUM, number=6, enum=PolicyType,) @@ -127,15 +121,11 @@ class PolicyTag(proto.Message): tags of this policy tag. """ - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - description = proto.Field(proto.STRING, number=3) - - parent_policy_tag = proto.Field(proto.STRING, number=4) - - child_policy_tags = proto.RepeatedField(proto.STRING, number=5) + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) + parent_policy_tag = proto.Field(proto.STRING, number=4,) + child_policy_tags = proto.RepeatedField(proto.STRING, number=5,) class CreateTaxonomyRequest(proto.Message): @@ -150,8 +140,7 @@ class CreateTaxonomyRequest(proto.Message): The taxonomy to be created. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) taxonomy = proto.Field(proto.MESSAGE, number=2, message="Taxonomy",) @@ -166,7 +155,7 @@ class DeleteTaxonomyRequest(proto.Message): also be deleted. 
""" - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpdateTaxonomyRequest(proto.Message): @@ -186,8 +175,9 @@ class UpdateTaxonomyRequest(proto.Message): """ taxonomy = proto.Field(proto.MESSAGE, number=1, message="Taxonomy",) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class ListTaxonomiesRequest(proto.Message): @@ -207,11 +197,9 @@ class ListTaxonomiesRequest(proto.Message): request, if any. If not set, defaults to an empty string. """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListTaxonomiesResponse(proto.Message): @@ -232,8 +220,7 @@ def raw_page(self): return self taxonomies = proto.RepeatedField(proto.MESSAGE, number=1, message="Taxonomy",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class GetTaxonomyRequest(proto.Message): @@ -246,7 +233,7 @@ class GetTaxonomyRequest(proto.Message): taxonomy. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreatePolicyTagRequest(proto.Message): @@ -261,8 +248,7 @@ class CreatePolicyTagRequest(proto.Message): The policy tag to be created. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) policy_tag = proto.Field(proto.MESSAGE, number=2, message="PolicyTag",) @@ -277,7 +263,7 @@ class DeletePolicyTagRequest(proto.Message): will also be deleted. 
""" - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpdatePolicyTagRequest(proto.Message): @@ -301,8 +287,9 @@ class UpdatePolicyTagRequest(proto.Message): """ policy_tag = proto.Field(proto.MESSAGE, number=1, message="PolicyTag",) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class ListPolicyTagsRequest(proto.Message): @@ -322,11 +309,9 @@ class ListPolicyTagsRequest(proto.Message): request, if any. If not set, defaults to an empty string. """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListPolicyTagsResponse(proto.Message): @@ -348,8 +333,7 @@ def raw_page(self): return self policy_tags = proto.RepeatedField(proto.MESSAGE, number=1, message="PolicyTag",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class GetPolicyTagRequest(proto.Message): @@ -362,7 +346,7 @@ class GetPolicyTagRequest(proto.Message): policy tag. 
""" - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py b/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py index eba6c7b6..d506e649 100644 --- a/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py +++ b/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,10 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.datacatalog_v1beta1.types import policytagmanager @@ -53,10 +50,8 @@ class SerializedTaxonomy(proto.Message): taxonomy if any. """ - display_name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=2) - + display_name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) policy_tags = proto.RepeatedField( proto.MESSAGE, number=3, message="SerializedPolicyTag", ) @@ -79,10 +74,8 @@ class SerializedPolicyTag(proto.Message): Children of the policy tag if any. 
""" - display_name = proto.Field(proto.STRING, number=2) - - description = proto.Field(proto.STRING, number=3) - + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) child_policy_tags = proto.RepeatedField( proto.MESSAGE, number=4, message="SerializedPolicyTag", ) @@ -100,8 +93,7 @@ class ImportTaxonomiesRequest(proto.Message): Inline source used for taxonomies import """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) inline_source = proto.Field( proto.MESSAGE, number=2, oneof="source", message="InlineSource", ) @@ -109,7 +101,6 @@ class ImportTaxonomiesRequest(proto.Message): class InlineSource(proto.Message): r"""Inline source used for taxonomies import. - Attributes: taxonomies (Sequence[google.cloud.datacatalog_v1beta1.types.SerializedTaxonomy]): Required. Taxonomies to be imported. @@ -149,11 +140,9 @@ class ExportTaxonomiesRequest(proto.Message): Export taxonomies as serialized taxonomies. """ - parent = proto.Field(proto.STRING, number=1) - - taxonomies = proto.RepeatedField(proto.STRING, number=2) - - serialized_taxonomies = proto.Field(proto.BOOL, number=3, oneof="destination") + parent = proto.Field(proto.STRING, number=1,) + taxonomies = proto.RepeatedField(proto.STRING, number=2,) + serialized_taxonomies = proto.Field(proto.BOOL, number=3, oneof="destination",) class ExportTaxonomiesResponse(proto.Message): diff --git a/google/cloud/datacatalog_v1beta1/types/schema.py b/google/cloud/datacatalog_v1beta1/types/schema.py index 51c2c566..080f3b71 100644 --- a/google/cloud/datacatalog_v1beta1/types/schema.py +++ b/google/cloud/datacatalog_v1beta1/types/schema.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore @@ -25,7 +23,6 @@ class Schema(proto.Message): r"""Represents a schema (e.g. BigQuery, GoogleSQL, Avro schema). - Attributes: columns (Sequence[google.cloud.datacatalog_v1beta1.types.ColumnSchema]): Required. Schema of columns. A maximum of @@ -57,14 +54,10 @@ class ColumnSchema(proto.Message): have zero or more sub-columns. """ - column = proto.Field(proto.STRING, number=6) - - type_ = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=2) - - mode = proto.Field(proto.STRING, number=3) - + column = proto.Field(proto.STRING, number=6,) + type_ = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + mode = proto.Field(proto.STRING, number=3,) subcolumns = proto.RepeatedField(proto.MESSAGE, number=7, message="ColumnSchema",) diff --git a/google/cloud/datacatalog_v1beta1/types/search.py b/google/cloud/datacatalog_v1beta1/types/search.py index 5c4d9568..4598d4a8 100644 --- a/google/cloud/datacatalog_v1beta1/types/search.py +++ b/google/cloud/datacatalog_v1beta1/types/search.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore @@ -66,12 +64,9 @@ class SearchCatalogResult(proto.Message): """ search_result_type = proto.Field(proto.ENUM, number=1, enum="SearchResultType",) - - search_result_subtype = proto.Field(proto.STRING, number=2) - - relative_resource_name = proto.Field(proto.STRING, number=3) - - linked_resource = proto.Field(proto.STRING, number=4) + search_result_subtype = proto.Field(proto.STRING, number=2,) + relative_resource_name = proto.Field(proto.STRING, number=3,) + linked_resource = proto.Field(proto.STRING, number=4,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1beta1/types/table_spec.py b/google/cloud/datacatalog_v1beta1/types/table_spec.py index 8c041930..790a59bd 100644 --- a/google/cloud/datacatalog_v1beta1/types/table_spec.py +++ b/google/cloud/datacatalog_v1beta1/types/table_spec.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore @@ -39,7 +37,6 @@ class TableSourceType(proto.Enum): class BigQueryTableSpec(proto.Message): r"""Describes a BigQuery table. - Attributes: table_source_type (google.cloud.datacatalog_v1beta1.types.TableSourceType): Output only. The table source type. @@ -52,11 +49,9 @@ class BigQueryTableSpec(proto.Message): """ table_source_type = proto.Field(proto.ENUM, number=1, enum="TableSourceType",) - view_spec = proto.Field( proto.MESSAGE, number=2, oneof="type_spec", message="ViewSpec", ) - table_spec = proto.Field( proto.MESSAGE, number=3, oneof="type_spec", message="TableSpec", ) @@ -64,19 +59,17 @@ class BigQueryTableSpec(proto.Message): class ViewSpec(proto.Message): r"""Table view specification. - Attributes: view_query (str): Output only. The query that defines the table view. 
""" - view_query = proto.Field(proto.STRING, number=1) + view_query = proto.Field(proto.STRING, number=1,) class TableSpec(proto.Message): r"""Normal BigQuery table spec. - Attributes: grouped_entry (str): Output only. If the table is a dated shard, i.e., with name @@ -87,7 +80,7 @@ class TableSpec(proto.Message): Otherwise, ``grouped_entry`` is empty. """ - grouped_entry = proto.Field(proto.STRING, number=1) + grouped_entry = proto.Field(proto.STRING, number=1,) class BigQueryDateShardedSpec(proto.Message): @@ -109,11 +102,9 @@ class BigQueryDateShardedSpec(proto.Message): Output only. Total number of shards. """ - dataset = proto.Field(proto.STRING, number=1) - - table_prefix = proto.Field(proto.STRING, number=2) - - shard_count = proto.Field(proto.INT64, number=3) + dataset = proto.Field(proto.STRING, number=1,) + table_prefix = proto.Field(proto.STRING, number=2,) + shard_count = proto.Field(proto.INT64, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datacatalog_v1beta1/types/tags.py b/google/cloud/datacatalog_v1beta1/types/tags.py index 575e9964..8cf07527 100644 --- a/google/cloud/datacatalog_v1beta1/types/tags.py +++ b/google/cloud/datacatalog_v1beta1/types/tags.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -71,14 +68,10 @@ class Tag(proto.Message): and at most 500 fields. 
""" - name = proto.Field(proto.STRING, number=1) - - template = proto.Field(proto.STRING, number=2) - - template_display_name = proto.Field(proto.STRING, number=5) - - column = proto.Field(proto.STRING, number=4, oneof="scope") - + name = proto.Field(proto.STRING, number=1,) + template = proto.Field(proto.STRING, number=2,) + template_display_name = proto.Field(proto.STRING, number=5,) + column = proto.Field(proto.STRING, number=4, oneof="scope",) fields = proto.MapField(proto.STRING, proto.MESSAGE, number=3, message="TagField",) @@ -117,29 +110,22 @@ class TagField(proto.Message): class EnumValue(proto.Message): r"""Holds an enum value. - Attributes: display_name (str): The display name of the enum value. """ - display_name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=1) - - double_value = proto.Field(proto.DOUBLE, number=2, oneof="kind") - - string_value = proto.Field(proto.STRING, number=3, oneof="kind") - - bool_value = proto.Field(proto.BOOL, number=4, oneof="kind") + display_name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=1,) + double_value = proto.Field(proto.DOUBLE, number=2, oneof="kind",) + string_value = proto.Field(proto.STRING, number=3, oneof="kind",) + bool_value = proto.Field(proto.BOOL, number=4, oneof="kind",) timestamp_value = proto.Field( - proto.MESSAGE, number=5, oneof="kind", message=timestamp.Timestamp, + proto.MESSAGE, number=5, oneof="kind", message=timestamp_pb2.Timestamp, ) - enum_value = proto.Field(proto.MESSAGE, number=6, oneof="kind", message=EnumValue,) - - order = proto.Field(proto.INT32, number=7) + order = proto.Field(proto.INT32, number=7,) class TagTemplate(proto.Message): @@ -178,10 +164,8 @@ class TagTemplate(proto.Message): must start with a letter or underscore. 
""" - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) fields = proto.MapField( proto.STRING, proto.MESSAGE, number=3, message="TagTemplateField", ) @@ -189,7 +173,6 @@ class TagTemplate(proto.Message): class TagTemplateField(proto.Message): r"""The template for an individual field within a tag template. - Attributes: name (str): Output only. The resource name of the tag template field in @@ -217,20 +200,15 @@ class TagTemplateField(proto.Message): to be sequential. """ - name = proto.Field(proto.STRING, number=6) - - display_name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=6,) + display_name = proto.Field(proto.STRING, number=1,) type_ = proto.Field(proto.MESSAGE, number=2, message="FieldType",) - - is_required = proto.Field(proto.BOOL, number=3) - - order = proto.Field(proto.INT32, number=5) + is_required = proto.Field(proto.BOOL, number=3,) + order = proto.Field(proto.INT32, number=5,) class FieldType(proto.Message): r""" - Attributes: primitive_type (google.cloud.datacatalog_v1beta1.types.FieldType.PrimitiveType): Represents primitive types - string, bool @@ -249,7 +227,6 @@ class PrimitiveType(proto.Enum): class EnumType(proto.Message): r""" - Attributes: allowed_values (Sequence[google.cloud.datacatalog_v1beta1.types.FieldType.EnumType.EnumValue]): Required on create; optional on update. The @@ -266,14 +243,13 @@ class EnumType(proto.Message): class EnumValue(proto.Message): r""" - Attributes: display_name (str): Required. The display name of the enum value. Must not be an empty string. 
""" - display_name = proto.Field(proto.STRING, number=1) + display_name = proto.Field(proto.STRING, number=1,) allowed_values = proto.RepeatedField( proto.MESSAGE, number=1, message="FieldType.EnumType.EnumValue", @@ -282,7 +258,6 @@ class EnumValue(proto.Message): primitive_type = proto.Field( proto.ENUM, number=1, oneof="type_decl", enum=PrimitiveType, ) - enum_type = proto.Field( proto.MESSAGE, number=2, oneof="type_decl", message=EnumType, ) diff --git a/google/cloud/datacatalog_v1beta1/types/timestamps.py b/google/cloud/datacatalog_v1beta1/types/timestamps.py index fe45394c..4bff13c3 100644 --- a/google/cloud/datacatalog_v1beta1/types/timestamps.py +++ b/google/cloud/datacatalog_v1beta1/types/timestamps.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -43,11 +40,9 @@ class SystemTimestamps(proto.Message): apllicable to BigQuery resources. 
""" - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - expire_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + expire_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/scripts/fixup_datacatalog_v1_keywords.py b/scripts/fixup_datacatalog_v1_keywords.py index 04befa38..38e83db3 100644 --- a/scripts/fixup_datacatalog_v1_keywords.py +++ b/scripts/fixup_datacatalog_v1_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import argparse import os import libcst as cst @@ -41,34 +39,46 @@ def partition( class datacatalogCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_entry': ('parent', 'entry_id', 'entry', ), - 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', ), - 'create_tag': ('parent', 'tag', ), - 'create_tag_template': ('parent', 'tag_template_id', 'tag_template', ), - 'create_tag_template_field': ('parent', 'tag_template_field_id', 'tag_template_field', ), - 'delete_entry': ('name', ), - 'delete_entry_group': ('name', 'force', ), - 'delete_tag': ('name', ), - 'delete_tag_template': ('name', 'force', ), - 'delete_tag_template_field': ('name', 'force', ), - 'get_entry': ('name', ), - 'get_entry_group': ('name', 'read_mask', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_tag_template': ('name', ), - 'list_entries': ('parent', 'page_size', 'page_token', 'read_mask', ), - 'list_entry_groups': ('parent', 'page_size', 'page_token', ), - 'list_tags': ('parent', 'page_size', 'page_token', ), - 'lookup_entry': ('linked_resource', 'sql_resource', ), - 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), - 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_entry': ('entry', 'update_mask', ), - 'update_entry_group': ('entry_group', 'update_mask', ), - 'update_tag': ('tag', 'update_mask', ), - 'update_tag_template': ('tag_template', 'update_mask', ), - 'update_tag_template_field': ('name', 'tag_template_field', 'update_mask', ), - + 'create_entry': ('parent', 'entry_id', 'entry', ), + 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', ), + 'create_policy_tag': ('parent', 'policy_tag', ), + 'create_tag': ('parent', 'tag', ), + 'create_tag_template': ('parent', 'tag_template_id', 
'tag_template', ), + 'create_tag_template_field': ('parent', 'tag_template_field_id', 'tag_template_field', ), + 'create_taxonomy': ('parent', 'taxonomy', ), + 'delete_entry': ('name', ), + 'delete_entry_group': ('name', 'force', ), + 'delete_policy_tag': ('name', ), + 'delete_tag': ('name', ), + 'delete_tag_template': ('name', 'force', ), + 'delete_tag_template_field': ('name', 'force', ), + 'delete_taxonomy': ('name', ), + 'export_taxonomies': ('parent', 'taxonomies', 'serialized_taxonomies', ), + 'get_entry': ('name', ), + 'get_entry_group': ('name', 'read_mask', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_policy_tag': ('name', ), + 'get_tag_template': ('name', ), + 'get_taxonomy': ('name', ), + 'import_taxonomies': ('parent', 'inline_source', 'cross_regional_source', ), + 'list_entries': ('parent', 'page_size', 'page_token', 'read_mask', ), + 'list_entry_groups': ('parent', 'page_size', 'page_token', ), + 'list_policy_tags': ('parent', 'page_size', 'page_token', ), + 'list_tags': ('parent', 'page_size', 'page_token', ), + 'list_taxonomies': ('parent', 'page_size', 'page_token', ), + 'lookup_entry': ('linked_resource', 'sql_resource', 'fully_qualified_name', ), + 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), + 'rename_tag_template_field_enum_value': ('name', 'new_enum_value_display_name', ), + 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_entry': ('entry', 'update_mask', ), + 'update_entry_group': ('entry_group', 'update_mask', ), + 'update_policy_tag': ('policy_tag', 'update_mask', ), + 'update_tag': ('tag', 'update_mask', ), + 'update_tag_template': ('tag_template', 'update_mask', ), + 'update_tag_template_field': ('name', 'tag_template_field', 'update_mask', ), + 'update_taxonomy': ('taxonomy', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> 
cst.CSTNode: @@ -99,7 +109,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/scripts/fixup_datacatalog_v1beta1_keywords.py b/scripts/fixup_datacatalog_v1beta1_keywords.py index bf43018a..e6399586 100644 --- a/scripts/fixup_datacatalog_v1beta1_keywords.py +++ b/scripts/fixup_datacatalog_v1beta1_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import argparse import os import libcst as cst @@ -41,46 +39,45 @@ def partition( class datacatalogCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_entry': ('parent', 'entry_id', 'entry', ), - 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', ), - 'create_policy_tag': ('parent', 'policy_tag', ), - 'create_tag': ('parent', 'tag', ), - 'create_tag_template': ('parent', 'tag_template_id', 'tag_template', ), - 'create_tag_template_field': ('parent', 'tag_template_field_id', 'tag_template_field', ), - 'create_taxonomy': ('parent', 'taxonomy', ), - 'delete_entry': ('name', ), - 'delete_entry_group': ('name', 'force', ), - 'delete_policy_tag': ('name', ), - 'delete_tag': ('name', ), - 'delete_tag_template': ('name', 'force', ), - 'delete_tag_template_field': ('name', 'force', ), - 'delete_taxonomy': ('name', ), - 'export_taxonomies': ('parent', 'taxonomies', 'serialized_taxonomies', ), - 'get_entry': ('name', ), - 'get_entry_group': ('name', 'read_mask', ), - 'get_iam_policy': 
('resource', 'options', ), - 'get_policy_tag': ('name', ), - 'get_tag_template': ('name', ), - 'get_taxonomy': ('name', ), - 'import_taxonomies': ('parent', 'inline_source', ), - 'list_entries': ('parent', 'page_size', 'page_token', 'read_mask', ), - 'list_entry_groups': ('parent', 'page_size', 'page_token', ), - 'list_policy_tags': ('parent', 'page_size', 'page_token', ), - 'list_tags': ('parent', 'page_size', 'page_token', ), - 'list_taxonomies': ('parent', 'page_size', 'page_token', ), - 'lookup_entry': ('linked_resource', 'sql_resource', ), - 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), - 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_entry': ('entry', 'update_mask', ), - 'update_entry_group': ('entry_group', 'update_mask', ), - 'update_policy_tag': ('policy_tag', 'update_mask', ), - 'update_tag': ('tag', 'update_mask', ), - 'update_tag_template': ('tag_template', 'update_mask', ), - 'update_tag_template_field': ('name', 'tag_template_field', 'update_mask', ), - 'update_taxonomy': ('taxonomy', 'update_mask', ), - + 'create_entry': ('parent', 'entry_id', 'entry', ), + 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', ), + 'create_policy_tag': ('parent', 'policy_tag', ), + 'create_tag': ('parent', 'tag', ), + 'create_tag_template': ('parent', 'tag_template_id', 'tag_template', ), + 'create_tag_template_field': ('parent', 'tag_template_field_id', 'tag_template_field', ), + 'create_taxonomy': ('parent', 'taxonomy', ), + 'delete_entry': ('name', ), + 'delete_entry_group': ('name', 'force', ), + 'delete_policy_tag': ('name', ), + 'delete_tag': ('name', ), + 'delete_tag_template': ('name', 'force', ), + 'delete_tag_template_field': ('name', 'force', ), + 'delete_taxonomy': ('name', ), + 'export_taxonomies': ('parent', 'taxonomies', 'serialized_taxonomies', ), + 'get_entry': ('name', ), 
+ 'get_entry_group': ('name', 'read_mask', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_policy_tag': ('name', ), + 'get_tag_template': ('name', ), + 'get_taxonomy': ('name', ), + 'import_taxonomies': ('parent', 'inline_source', ), + 'list_entries': ('parent', 'page_size', 'page_token', 'read_mask', ), + 'list_entry_groups': ('parent', 'page_size', 'page_token', ), + 'list_policy_tags': ('parent', 'page_size', 'page_token', ), + 'list_tags': ('parent', 'page_size', 'page_token', ), + 'list_taxonomies': ('parent', 'page_size', 'page_token', ), + 'lookup_entry': ('linked_resource', 'sql_resource', ), + 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), + 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_entry': ('entry', 'update_mask', ), + 'update_entry_group': ('entry_group', 'update_mask', ), + 'update_policy_tag': ('policy_tag', 'update_mask', ), + 'update_tag': ('tag', 'update_mask', ), + 'update_tag_template': ('tag_template', 'update_mask', ), + 'update_tag_template_field': ('name', 'tag_template_field', 'update_mask', ), + 'update_taxonomy': ('taxonomy', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -111,7 +108,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index a732223e..770a9840 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -8,4 +8,5 @@ google-api-core==1.22.2 grpc-google-iam-v1==0.12.3 libcst==0.2.5 -proto-plus==1.4.0 \ No newline at end of file 
+proto-plus==1.4.0 +google-auth==1.24.0 # TODO(busunkim): remove when google-auth>=1.25.0 is implicitly required through newer google-api-core diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/datacatalog_v1/__init__.py b/tests/unit/gapic/datacatalog_v1/__init__.py index 42ffdf2b..4de65971 100644 --- a/tests/unit/gapic/datacatalog_v1/__init__.py +++ b/tests/unit/gapic/datacatalog_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index 301b8027..55b03b47 100644 --- a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,19 +23,26 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.datacatalog_v1.services.data_catalog import DataCatalogAsyncClient from google.cloud.datacatalog_v1.services.data_catalog import DataCatalogClient from google.cloud.datacatalog_v1.services.data_catalog import pagers from google.cloud.datacatalog_v1.services.data_catalog import transports +from google.cloud.datacatalog_v1.services.data_catalog.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.datacatalog_v1.services.data_catalog.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import data_source from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import gcs_fileset_spec from google.cloud.datacatalog_v1.types import schema @@ -44,13 +50,37 @@ from google.cloud.datacatalog_v1.types import table_spec from google.cloud.datacatalog_v1.types import tags from google.cloud.datacatalog_v1.types import timestamps -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import options_pb2 as options # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from 
google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.type import expr_pb2 as expr # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -96,7 +126,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class", [DataCatalogClient, DataCatalogAsyncClient,]) def test_data_catalog_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -111,7 +141,7 @@ def test_data_catalog_client_from_service_account_info(client_class): @pytest.mark.parametrize("client_class", 
[DataCatalogClient, DataCatalogAsyncClient,]) def test_data_catalog_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -162,7 +192,7 @@ def test_data_catalog_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(DataCatalogClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -446,7 +476,7 @@ def test_search_catalog( transport: str = "grpc", request_type=datacatalog.SearchCatalogRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -459,21 +489,16 @@ def test_search_catalog( call.return_value = datacatalog.SearchCatalogResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.search_catalog(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.SearchCatalogRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchCatalogPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -485,7 +510,7 @@ def test_search_catalog_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -493,7 +518,6 @@ def test_search_catalog_empty_call(): client.search_catalog() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.SearchCatalogRequest() @@ -502,7 +526,7 @@ async def test_search_catalog_async( transport: str = "grpc_asyncio", request_type=datacatalog.SearchCatalogRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -518,20 +542,16 @@ async def test_search_catalog_async( unreachable=["unreachable_value"], ) ) - response = await client.search_catalog(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.SearchCatalogRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchCatalogAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -541,13 +561,12 @@ async def test_search_catalog_async_from_dict(): def test_search_catalog_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = datacatalog.SearchCatalogResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.search_catalog( @@ -561,16 +580,14 @@ def test_search_catalog_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].scope == datacatalog.SearchCatalogRequest.Scope( include_org_ids=["include_org_ids_value"] ) - assert args[0].query == "query_value" def test_search_catalog_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -586,7 +603,7 @@ def test_search_catalog_flattened_error(): @pytest.mark.asyncio async def test_search_catalog_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: @@ -609,17 +626,15 @@ async def test_search_catalog_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].scope == datacatalog.SearchCatalogRequest.Scope( include_org_ids=["include_org_ids_value"] ) - assert args[0].query == "query_value" @pytest.mark.asyncio async def test_search_catalog_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -634,7 +649,7 @@ async def test_search_catalog_flattened_error_async(): def test_search_catalog_pager(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: @@ -669,7 +684,7 @@ def test_search_catalog_pager(): def test_search_catalog_pages(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: @@ -699,7 +714,7 @@ def test_search_catalog_pages(): @pytest.mark.asyncio async def test_search_catalog_async_pager(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -736,7 +751,7 @@ async def test_search_catalog_async_pager(): @pytest.mark.asyncio async def test_search_catalog_async_pages(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -772,7 +787,7 @@ def test_create_entry_group( transport: str = "grpc", request_type=datacatalog.CreateEntryGroupRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -789,23 +804,17 @@ def test_create_entry_group( display_name="display_name_value", description="description_value", ) - response = client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryGroupRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -817,7 +826,7 @@ def test_create_entry_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -827,7 +836,6 @@ def test_create_entry_group_empty_call(): client.create_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryGroupRequest() @@ -836,7 +844,7 @@ async def test_create_entry_group_async( transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryGroupRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -855,22 +863,17 @@ async def test_create_entry_group_async( description="description_value", ) ) - response = await client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryGroupRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.EntryGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -880,11 +883,12 @@ async def test_create_entry_group_async_from_dict(): def test_create_entry_group_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateEntryGroupRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -892,7 +896,6 @@ def test_create_entry_group_field_headers(): type(client.transport.create_entry_group), "__call__" ) as call: call.return_value = datacatalog.EntryGroup() - client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. @@ -907,11 +910,12 @@ def test_create_entry_group_field_headers(): @pytest.mark.asyncio async def test_create_entry_group_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateEntryGroupRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -921,7 +925,6 @@ async def test_create_entry_group_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() ) - await client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. @@ -935,7 +938,7 @@ async def test_create_entry_group_field_headers_async(): def test_create_entry_group_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -943,7 +946,6 @@ def test_create_entry_group_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_entry_group( @@ -956,16 +958,13 @@ def test_create_entry_group_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].entry_group_id == "entry_group_id_value" - assert args[0].entry_group == datacatalog.EntryGroup(name="name_value") def test_create_entry_group_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -980,7 +979,7 @@ def test_create_entry_group_flattened_error(): @pytest.mark.asyncio async def test_create_entry_group_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1004,17 +1003,14 @@ async def test_create_entry_group_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].entry_group_id == "entry_group_id_value" - assert args[0].entry_group == datacatalog.EntryGroup(name="name_value") @pytest.mark.asyncio async def test_create_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1031,7 +1027,7 @@ def test_get_entry_group( transport: str = "grpc", request_type=datacatalog.GetEntryGroupRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1046,23 +1042,17 @@ def test_get_entry_group( display_name="display_name_value", description="description_value", ) - response = client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryGroupRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1074,7 +1064,7 @@ def test_get_entry_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1082,7 +1072,6 @@ def test_get_entry_group_empty_call(): client.get_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryGroupRequest() @@ -1091,7 +1080,7 @@ async def test_get_entry_group_async( transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryGroupRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1108,22 +1097,17 @@ async def test_get_entry_group_async( description="description_value", ) ) - response = await client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryGroupRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.EntryGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1133,17 +1117,17 @@ async def test_get_entry_group_async_from_dict(): def test_get_entry_group_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.GetEntryGroupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: call.return_value = datacatalog.EntryGroup() - client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. 
@@ -1158,11 +1142,12 @@ def test_get_entry_group_field_headers(): @pytest.mark.asyncio async def test_get_entry_group_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.GetEntryGroupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1170,7 +1155,6 @@ async def test_get_entry_group_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() ) - await client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. @@ -1184,31 +1168,29 @@ async def test_get_entry_group_field_headers_async(): def test_get_entry_group_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_entry_group( - name="name_value", read_mask=field_mask.FieldMask(paths=["paths_value"]), + name="name_value", + read_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - assert args[0].read_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].read_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_get_entry_group_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1216,13 +1198,13 @@ def test_get_entry_group_flattened_error(): client.get_entry_group( datacatalog.GetEntryGroupRequest(), name="name_value", - read_mask=field_mask.FieldMask(paths=["paths_value"]), + read_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_get_entry_group_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: @@ -1235,22 +1217,21 @@ async def test_get_entry_group_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_entry_group( - name="name_value", read_mask=field_mask.FieldMask(paths=["paths_value"]), + name="name_value", + read_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - assert args[0].read_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].read_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_get_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1258,7 +1239,7 @@ async def test_get_entry_group_flattened_error_async(): await client.get_entry_group( datacatalog.GetEntryGroupRequest(), name="name_value", - read_mask=field_mask.FieldMask(paths=["paths_value"]), + read_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1266,7 +1247,7 @@ def test_update_entry_group( transport: str = "grpc", request_type=datacatalog.UpdateEntryGroupRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1283,23 +1264,17 @@ def test_update_entry_group( display_name="display_name_value", description="description_value", ) - response = client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryGroupRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, datacatalog.EntryGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1311,7 +1286,7 @@ def test_update_entry_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1321,7 +1296,6 @@ def test_update_entry_group_empty_call(): client.update_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryGroupRequest() @@ -1330,7 +1304,7 @@ async def test_update_entry_group_async( transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryGroupRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1349,22 +1323,17 @@ async def test_update_entry_group_async( description="description_value", ) ) - response = await client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryGroupRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, datacatalog.EntryGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1374,11 +1343,12 @@ async def test_update_entry_group_async_from_dict(): def test_update_entry_group_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateEntryGroupRequest() + request.entry_group.name = "entry_group.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1386,7 +1356,6 @@ def test_update_entry_group_field_headers(): type(client.transport.update_entry_group), "__call__" ) as call: call.return_value = datacatalog.EntryGroup() - client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. @@ -1403,11 +1372,12 @@ def test_update_entry_group_field_headers(): @pytest.mark.asyncio async def test_update_entry_group_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateEntryGroupRequest() + request.entry_group.name = "entry_group.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1417,7 +1387,6 @@ async def test_update_entry_group_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() ) - await client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. 
@@ -1433,7 +1402,7 @@ async def test_update_entry_group_field_headers_async(): def test_update_entry_group_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1441,26 +1410,23 @@ def test_update_entry_group_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_entry_group( entry_group=datacatalog.EntryGroup(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].entry_group == datacatalog.EntryGroup(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_entry_group_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1468,13 +1434,13 @@ def test_update_entry_group_flattened_error(): client.update_entry_group( datacatalog.UpdateEntryGroupRequest(), entry_group=datacatalog.EntryGroup(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_entry_group_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1490,22 +1456,20 @@ async def test_update_entry_group_flattened_async(): # using the keyword arguments to the method. response = await client.update_entry_group( entry_group=datacatalog.EntryGroup(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].entry_group == datacatalog.EntryGroup(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1513,7 +1477,7 @@ async def test_update_entry_group_flattened_error_async(): await client.update_entry_group( datacatalog.UpdateEntryGroupRequest(), entry_group=datacatalog.EntryGroup(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1521,7 +1485,7 @@ def test_delete_entry_group( transport: str = "grpc", request_type=datacatalog.DeleteEntryGroupRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1534,13 +1498,11 @@ def test_delete_entry_group( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryGroupRequest() # Establish that the response is the type that we expect. @@ -1555,7 +1517,7 @@ def test_delete_entry_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1565,7 +1527,6 @@ def test_delete_entry_group_empty_call(): client.delete_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryGroupRequest() @@ -1574,7 +1535,7 @@ async def test_delete_entry_group_async( transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryGroupRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1587,13 +1548,11 @@ async def test_delete_entry_group_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryGroupRequest() # Establish that the response is the type that we expect. @@ -1606,11 +1565,12 @@ async def test_delete_entry_group_async_from_dict(): def test_delete_entry_group_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteEntryGroupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1618,7 +1578,6 @@ def test_delete_entry_group_field_headers(): type(client.transport.delete_entry_group), "__call__" ) as call: call.return_value = None - client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. 
@@ -1633,11 +1592,12 @@ def test_delete_entry_group_field_headers(): @pytest.mark.asyncio async def test_delete_entry_group_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteEntryGroupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1645,7 +1605,6 @@ async def test_delete_entry_group_field_headers_async(): type(client.transport.delete_entry_group), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. @@ -1659,7 +1618,7 @@ async def test_delete_entry_group_field_headers_async(): def test_delete_entry_group_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1667,7 +1626,6 @@ def test_delete_entry_group_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_entry_group(name="name_value",) @@ -1676,12 +1634,11 @@ def test_delete_entry_group_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_entry_group_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1693,7 +1650,7 @@ def test_delete_entry_group_flattened_error(): @pytest.mark.asyncio async def test_delete_entry_group_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1711,13 +1668,12 @@ async def test_delete_entry_group_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1731,7 +1687,7 @@ def test_list_entry_groups( transport: str = "grpc", request_type=datacatalog.ListEntryGroupsRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1746,19 +1702,15 @@ def test_list_entry_groups( call.return_value = datacatalog.ListEntryGroupsResponse( next_page_token="next_page_token_value", ) - response = client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntryGroupsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntryGroupsPager) - assert response.next_page_token == "next_page_token_value" @@ -1770,7 +1722,7 @@ def test_list_entry_groups_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1780,7 +1732,6 @@ def test_list_entry_groups_empty_call(): client.list_entry_groups() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntryGroupsRequest() @@ -1789,7 +1740,7 @@ async def test_list_entry_groups_async( transport: str = "grpc_asyncio", request_type=datacatalog.ListEntryGroupsRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1806,18 +1757,15 @@ async def test_list_entry_groups_async( next_page_token="next_page_token_value", ) ) - response = await client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntryGroupsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListEntryGroupsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1827,11 +1775,12 @@ async def test_list_entry_groups_async_from_dict(): def test_list_entry_groups_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListEntryGroupsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1839,7 +1788,6 @@ def test_list_entry_groups_field_headers(): type(client.transport.list_entry_groups), "__call__" ) as call: call.return_value = datacatalog.ListEntryGroupsResponse() - client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. @@ -1854,11 +1802,12 @@ def test_list_entry_groups_field_headers(): @pytest.mark.asyncio async def test_list_entry_groups_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListEntryGroupsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1868,7 +1817,6 @@ async def test_list_entry_groups_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntryGroupsResponse() ) - await client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. 
@@ -1882,7 +1830,7 @@ async def test_list_entry_groups_field_headers_async(): def test_list_entry_groups_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1890,7 +1838,6 @@ def test_list_entry_groups_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntryGroupsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_entry_groups(parent="parent_value",) @@ -1899,12 +1846,11 @@ def test_list_entry_groups_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_entry_groups_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1916,7 +1862,7 @@ def test_list_entry_groups_flattened_error(): @pytest.mark.asyncio async def test_list_entry_groups_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1936,13 +1882,12 @@ async def test_list_entry_groups_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_entry_groups_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1953,7 +1898,7 @@ async def test_list_entry_groups_flattened_error_async(): def test_list_entry_groups_pager(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1995,7 +1940,7 @@ def test_list_entry_groups_pager(): def test_list_entry_groups_pages(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2029,7 +1974,7 @@ def test_list_entry_groups_pages(): @pytest.mark.asyncio async def test_list_entry_groups_async_pager(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2070,7 +2015,7 @@ async def test_list_entry_groups_async_pager(): @pytest.mark.asyncio async def test_list_entry_groups_async_pages(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2110,7 +2055,7 @@ def test_create_entry( transport: str = "grpc", request_type=datacatalog.CreateEntryRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2123,6 +2068,7 @@ def test_create_entry( call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", type_=datacatalog.EntryType.TABLE, @@ -2130,26 +2076,23 @@ def test_create_entry( gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] ), + database_table_spec=datacatalog.DatabaseTableSpec( + type_=datacatalog.DatabaseTableSpec.TableType.NATIVE + ), ) - response = client.create_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - + assert response.fully_qualified_name == "fully_qualified_name_value" assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2161,7 +2104,7 @@ def test_create_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2169,7 +2112,6 @@ def test_create_entry_empty_call(): client.create_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryRequest() @@ -2178,7 +2120,7 @@ async def test_create_entry_async( transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2192,28 +2134,24 @@ async def test_create_entry_async( datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", ) ) - response = await client.create_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - + assert response.fully_qualified_name == "fully_qualified_name_value" assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2223,17 +2161,17 @@ async def test_create_entry_async_from_dict(): def test_create_entry_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateEntryRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_entry), "__call__") as call: call.return_value = datacatalog.Entry() - client.create_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2248,17 +2186,17 @@ def test_create_entry_field_headers(): @pytest.mark.asyncio async def test_create_entry_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateEntryRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - await client.create_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2272,13 +2210,12 @@ async def test_create_entry_field_headers_async(): def test_create_entry_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_entry( @@ -2291,16 +2228,13 @@ def test_create_entry_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].entry_id == "entry_id_value" - assert args[0].entry == datacatalog.Entry(name="name_value") def test_create_entry_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2315,7 +2249,7 @@ def test_create_entry_flattened_error(): @pytest.mark.asyncio async def test_create_entry_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_entry), "__call__") as call: @@ -2335,17 +2269,14 @@ async def test_create_entry_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].entry_id == "entry_id_value" - assert args[0].entry == datacatalog.Entry(name="name_value") @pytest.mark.asyncio async def test_create_entry_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2362,7 +2293,7 @@ def test_update_entry( transport: str = "grpc", request_type=datacatalog.UpdateEntryRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2375,6 +2306,7 @@ def test_update_entry( call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", type_=datacatalog.EntryType.TABLE, @@ -2382,26 +2314,23 @@ def test_update_entry( gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] ), + database_table_spec=datacatalog.DatabaseTableSpec( + type_=datacatalog.DatabaseTableSpec.TableType.NATIVE + ), ) - response = client.update_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - + assert response.fully_qualified_name == "fully_qualified_name_value" assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2413,7 +2342,7 @@ def test_update_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2421,7 +2350,6 @@ def test_update_entry_empty_call(): client.update_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryRequest() @@ -2430,7 +2358,7 @@ async def test_update_entry_async( transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2444,28 +2372,24 @@ async def test_update_entry_async( datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", ) ) - response = await client.update_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - + assert response.fully_qualified_name == "fully_qualified_name_value" assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2475,17 +2399,17 @@ async def test_update_entry_async_from_dict(): def test_update_entry_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateEntryRequest() + request.entry.name = "entry.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_entry), "__call__") as call: call.return_value = datacatalog.Entry() - client.update_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2500,17 +2424,17 @@ def test_update_entry_field_headers(): @pytest.mark.asyncio async def test_update_entry_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateEntryRequest() + request.entry.name = "entry.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - await client.update_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2524,32 +2448,29 @@ async def test_update_entry_field_headers_async(): def test_update_entry_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_entry( entry=datacatalog.Entry(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].entry == datacatalog.Entry(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_entry_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2557,13 +2478,13 @@ def test_update_entry_flattened_error(): client.update_entry( datacatalog.UpdateEntryRequest(), entry=datacatalog.Entry(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_entry_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entry), "__call__") as call: @@ -2575,22 +2496,20 @@ async def test_update_entry_flattened_async(): # using the keyword arguments to the method. response = await client.update_entry( entry=datacatalog.Entry(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].entry == datacatalog.Entry(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_entry_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2598,7 +2517,7 @@ async def test_update_entry_flattened_error_async(): await client.update_entry( datacatalog.UpdateEntryRequest(), entry=datacatalog.Entry(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -2606,7 +2525,7 @@ def test_delete_entry( transport: str = "grpc", request_type=datacatalog.DeleteEntryRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2617,13 +2536,11 @@ def test_delete_entry( with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryRequest() # Establish that the response is the type that we expect. @@ -2638,7 +2555,7 @@ def test_delete_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2646,7 +2563,6 @@ def test_delete_entry_empty_call(): client.delete_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryRequest() @@ -2655,7 +2571,7 @@ async def test_delete_entry_async( transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2666,13 +2582,11 @@ async def test_delete_entry_async( with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryRequest() # Establish that the response is the type that we expect. @@ -2685,17 +2599,17 @@ async def test_delete_entry_async_from_dict(): def test_delete_entry_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteEntryRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: call.return_value = None - client.delete_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2710,17 +2624,17 @@ def test_delete_entry_field_headers(): @pytest.mark.asyncio async def test_delete_entry_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteEntryRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2734,13 +2648,12 @@ async def test_delete_entry_field_headers_async(): def test_delete_entry_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_entry(name="name_value",) @@ -2749,12 +2662,11 @@ def test_delete_entry_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_entry_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2766,7 +2678,7 @@ def test_delete_entry_flattened_error(): @pytest.mark.asyncio async def test_delete_entry_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: @@ -2782,13 +2694,12 @@ async def test_delete_entry_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_entry_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2800,7 +2711,7 @@ async def test_delete_entry_flattened_error_async(): def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryRequest): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2813,6 +2724,7 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", type_=datacatalog.EntryType.TABLE, @@ -2820,26 +2732,23 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] ), + database_table_spec=datacatalog.DatabaseTableSpec( + type_=datacatalog.DatabaseTableSpec.TableType.NATIVE + ), ) - response = client.get_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - + assert response.fully_qualified_name == "fully_qualified_name_value" assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2851,7 +2760,7 @@ def test_get_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2859,7 +2768,6 @@ def test_get_entry_empty_call(): client.get_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryRequest() @@ -2868,7 +2776,7 @@ async def test_get_entry_async( transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2882,28 +2790,24 @@ async def test_get_entry_async( datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", ) ) - response = await client.get_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - + assert response.fully_qualified_name == "fully_qualified_name_value" assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2913,17 +2817,17 @@ async def test_get_entry_async_from_dict(): def test_get_entry_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. request = datacatalog.GetEntryRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: call.return_value = datacatalog.Entry() - client.get_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2938,17 +2842,17 @@ def test_get_entry_field_headers(): @pytest.mark.asyncio async def test_get_entry_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.GetEntryRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - await client.get_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2962,13 +2866,12 @@ async def test_get_entry_field_headers_async(): def test_get_entry_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_entry(name="name_value",) @@ -2977,12 +2880,11 @@ def test_get_entry_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_entry_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2994,7 +2896,7 @@ def test_get_entry_flattened_error(): @pytest.mark.asyncio async def test_get_entry_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: @@ -3010,13 +2912,12 @@ async def test_get_entry_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_entry_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3030,7 +2931,7 @@ def test_lookup_entry( transport: str = "grpc", request_type=datacatalog.LookupEntryRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3043,6 +2944,7 @@ def test_lookup_entry( call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", type_=datacatalog.EntryType.TABLE, @@ -3050,26 +2952,23 @@ def test_lookup_entry( gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] ), + database_table_spec=datacatalog.DatabaseTableSpec( + type_=datacatalog.DatabaseTableSpec.TableType.NATIVE + ), ) - response = client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.LookupEntryRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - + assert response.fully_qualified_name == "fully_qualified_name_value" assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -3081,7 +2980,7 @@ def test_lookup_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3089,7 +2988,6 @@ def test_lookup_entry_empty_call(): client.lookup_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.LookupEntryRequest() @@ -3098,7 +2996,7 @@ async def test_lookup_entry_async( transport: str = "grpc_asyncio", request_type=datacatalog.LookupEntryRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3112,28 +3010,24 @@ async def test_lookup_entry_async( datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", + fully_qualified_name="fully_qualified_name_value", display_name="display_name_value", description="description_value", ) ) - response = await client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.LookupEntryRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - + assert response.fully_qualified_name == "fully_qualified_name_value" assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -3146,7 +3040,7 @@ def test_list_entries( transport: str = "grpc", request_type=datacatalog.ListEntriesRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3159,19 +3053,15 @@ def test_list_entries( call.return_value = datacatalog.ListEntriesResponse( next_page_token="next_page_token_value", ) - response = client.list_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntriesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntriesPager) - assert response.next_page_token == "next_page_token_value" @@ -3183,7 +3073,7 @@ def test_list_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3191,7 +3081,6 @@ def test_list_entries_empty_call(): client.list_entries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntriesRequest() @@ -3200,7 +3089,7 @@ async def test_list_entries_async( transport: str = "grpc_asyncio", request_type=datacatalog.ListEntriesRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3213,18 +3102,15 @@ async def test_list_entries_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntriesResponse(next_page_token="next_page_token_value",) ) - response = await client.list_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntriesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListEntriesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -3234,17 +3120,17 @@ async def test_list_entries_async_from_dict(): def test_list_entries_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListEntriesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: call.return_value = datacatalog.ListEntriesResponse() - client.list_entries(request) # Establish that the underlying gRPC stub method was called. 
@@ -3259,11 +3145,12 @@ def test_list_entries_field_headers(): @pytest.mark.asyncio async def test_list_entries_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListEntriesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3271,7 +3158,6 @@ async def test_list_entries_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntriesResponse() ) - await client.list_entries(request) # Establish that the underlying gRPC stub method was called. @@ -3285,13 +3171,12 @@ async def test_list_entries_field_headers_async(): def test_list_entries_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntriesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_entries(parent="parent_value",) @@ -3300,12 +3185,11 @@ def test_list_entries_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_entries_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3317,7 +3201,7 @@ def test_list_entries_flattened_error(): @pytest.mark.asyncio async def test_list_entries_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: @@ -3335,13 +3219,12 @@ async def test_list_entries_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_entries_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3352,7 +3235,7 @@ async def test_list_entries_flattened_error_async(): def test_list_entries_pager(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: @@ -3390,7 +3273,7 @@ def test_list_entries_pager(): def test_list_entries_pages(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_entries), "__call__") as call: @@ -3420,7 +3303,7 @@ def test_list_entries_pages(): @pytest.mark.asyncio async def test_list_entries_async_pager(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3457,7 +3340,7 @@ async def test_list_entries_async_pager(): @pytest.mark.asyncio async def test_list_entries_async_pages(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3493,7 +3376,7 @@ def test_create_tag_template( transport: str = "grpc", request_type=datacatalog.CreateTagTemplateRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3508,21 +3391,16 @@ def test_create_tag_template( call.return_value = tags.TagTemplate( name="name_value", display_name="display_name_value", ) - response = client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" @@ -3534,7 +3412,7 @@ def test_create_tag_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3544,7 +3422,6 @@ def test_create_tag_template_empty_call(): client.create_tag_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateRequest() @@ -3553,7 +3430,7 @@ async def test_create_tag_template_async( transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagTemplateRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3568,20 +3445,16 @@ async def test_create_tag_template_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplate(name="name_value", display_name="display_name_value",) ) - response = await client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" @@ -3591,11 +3464,12 @@ async def test_create_tag_template_async_from_dict(): def test_create_tag_template_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = datacatalog.CreateTagTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3603,7 +3477,6 @@ def test_create_tag_template_field_headers(): type(client.transport.create_tag_template), "__call__" ) as call: call.return_value = tags.TagTemplate() - client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. @@ -3618,11 +3491,12 @@ def test_create_tag_template_field_headers(): @pytest.mark.asyncio async def test_create_tag_template_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3630,7 +3504,6 @@ async def test_create_tag_template_field_headers_async(): type(client.transport.create_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - await client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. @@ -3644,7 +3517,7 @@ async def test_create_tag_template_field_headers_async(): def test_create_tag_template_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3652,7 +3525,6 @@ def test_create_tag_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_tag_template( @@ -3665,16 +3537,13 @@ def test_create_tag_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].tag_template_id == "tag_template_id_value" - assert args[0].tag_template == tags.TagTemplate(name="name_value") def test_create_tag_template_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3689,7 +3558,7 @@ def test_create_tag_template_flattened_error(): @pytest.mark.asyncio async def test_create_tag_template_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3711,17 +3580,14 @@ async def test_create_tag_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].tag_template_id == "tag_template_id_value" - assert args[0].tag_template == tags.TagTemplate(name="name_value") @pytest.mark.asyncio async def test_create_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3738,7 +3604,7 @@ def test_get_tag_template( transport: str = "grpc", request_type=datacatalog.GetTagTemplateRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3751,21 +3617,16 @@ def test_get_tag_template( call.return_value = tags.TagTemplate( name="name_value", display_name="display_name_value", ) - response = client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetTagTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" @@ -3777,7 +3638,7 @@ def test_get_tag_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3785,7 +3646,6 @@ def test_get_tag_template_empty_call(): client.get_tag_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetTagTemplateRequest() @@ -3794,7 +3654,7 @@ async def test_get_tag_template_async( transport: str = "grpc_asyncio", request_type=datacatalog.GetTagTemplateRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3807,20 +3667,16 @@ async def test_get_tag_template_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplate(name="name_value", display_name="display_name_value",) ) - response = await client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" @@ -3830,17 +3686,17 @@ async def test_get_tag_template_async_from_dict(): def test_get_tag_template_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.GetTagTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: call.return_value = tags.TagTemplate() - client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. 
@@ -3855,17 +3711,17 @@ def test_get_tag_template_field_headers(): @pytest.mark.asyncio async def test_get_tag_template_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.GetTagTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - await client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. @@ -3879,13 +3735,12 @@ async def test_get_tag_template_field_headers_async(): def test_get_tag_template_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_tag_template(name="name_value",) @@ -3894,12 +3749,11 @@ def test_get_tag_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_tag_template_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3911,7 +3765,7 @@ def test_get_tag_template_flattened_error(): @pytest.mark.asyncio async def test_get_tag_template_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: @@ -3927,13 +3781,12 @@ async def test_get_tag_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3947,7 +3800,7 @@ def test_update_tag_template( transport: str = "grpc", request_type=datacatalog.UpdateTagTemplateRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3962,21 +3815,16 @@ def test_update_tag_template( call.return_value = tags.TagTemplate( name="name_value", display_name="display_name_value", ) - response = client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, tags.TagTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" @@ -3988,7 +3836,7 @@ def test_update_tag_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3998,7 +3846,6 @@ def test_update_tag_template_empty_call(): client.update_tag_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateRequest() @@ -4007,7 +3854,7 @@ async def test_update_tag_template_async( transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagTemplateRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4022,20 +3869,16 @@ async def test_update_tag_template_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplate(name="name_value", display_name="display_name_value",) ) - response = await client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, tags.TagTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" @@ -4045,11 +3888,12 @@ async def test_update_tag_template_async_from_dict(): def test_update_tag_template_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagTemplateRequest() + request.tag_template.name = "tag_template.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4057,7 +3901,6 @@ def test_update_tag_template_field_headers(): type(client.transport.update_tag_template), "__call__" ) as call: call.return_value = tags.TagTemplate() - client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. @@ -4075,11 +3918,12 @@ def test_update_tag_template_field_headers(): @pytest.mark.asyncio async def test_update_tag_template_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagTemplateRequest() + request.tag_template.name = "tag_template.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4087,7 +3931,6 @@ async def test_update_tag_template_field_headers_async(): type(client.transport.update_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - await client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. 
@@ -4104,7 +3947,7 @@ async def test_update_tag_template_field_headers_async(): def test_update_tag_template_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4112,26 +3955,23 @@ def test_update_tag_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_tag_template( tag_template=tags.TagTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].tag_template == tags.TagTemplate(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_tag_template_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -4139,13 +3979,13 @@ def test_update_tag_template_flattened_error(): client.update_tag_template( datacatalog.UpdateTagTemplateRequest(), tag_template=tags.TagTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_tag_template_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4159,22 +3999,20 @@ async def test_update_tag_template_flattened_async(): # using the keyword arguments to the method. response = await client.update_tag_template( tag_template=tags.TagTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].tag_template == tags.TagTemplate(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -4182,7 +4020,7 @@ async def test_update_tag_template_flattened_error_async(): await client.update_tag_template( datacatalog.UpdateTagTemplateRequest(), tag_template=tags.TagTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -4190,7 +4028,7 @@ def test_delete_tag_template( transport: str = "grpc", request_type=datacatalog.DeleteTagTemplateRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4203,13 +4041,11 @@ def test_delete_tag_template( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateRequest() # Establish that the response is the type that we expect. @@ -4224,7 +4060,7 @@ def test_delete_tag_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4234,7 +4070,6 @@ def test_delete_tag_template_empty_call(): client.delete_tag_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateRequest() @@ -4243,7 +4078,7 @@ async def test_delete_tag_template_async( transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagTemplateRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4256,13 +4091,11 @@ async def test_delete_tag_template_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateRequest() # Establish that the response is the type that we expect. @@ -4275,11 +4108,12 @@ async def test_delete_tag_template_async_from_dict(): def test_delete_tag_template_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4287,7 +4121,6 @@ def test_delete_tag_template_field_headers(): type(client.transport.delete_tag_template), "__call__" ) as call: call.return_value = None - client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. 
@@ -4302,11 +4135,12 @@ def test_delete_tag_template_field_headers(): @pytest.mark.asyncio async def test_delete_tag_template_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4314,7 +4148,6 @@ async def test_delete_tag_template_field_headers_async(): type(client.transport.delete_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. @@ -4328,7 +4161,7 @@ async def test_delete_tag_template_field_headers_async(): def test_delete_tag_template_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4336,7 +4169,6 @@ def test_delete_tag_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_tag_template( @@ -4347,14 +4179,12 @@ def test_delete_tag_template_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].force == True def test_delete_tag_template_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4366,7 +4196,7 @@ def test_delete_tag_template_flattened_error(): @pytest.mark.asyncio async def test_delete_tag_template_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4384,15 +4214,13 @@ async def test_delete_tag_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].force == True @pytest.mark.asyncio async def test_delete_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -4406,7 +4234,7 @@ def test_create_tag_template_field( transport: str = "grpc", request_type=datacatalog.CreateTagTemplateFieldRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4422,27 +4250,22 @@ def test_create_tag_template_field( name="name_value", display_name="display_name_value", is_required=True, + description="description_value", order=540, ) - response = client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateFieldRequest() # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_required is True - + assert response.description == "description_value" assert response.order == 540 @@ -4454,7 +4277,7 @@ def test_create_tag_template_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4464,7 +4287,6 @@ def test_create_tag_template_field_empty_call(): client.create_tag_template_field() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateFieldRequest() @@ -4474,7 +4296,7 @@ async def test_create_tag_template_field_async( request_type=datacatalog.CreateTagTemplateFieldRequest, ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4491,27 +4313,23 @@ async def test_create_tag_template_field_async( name="name_value", display_name="display_name_value", is_required=True, + description="description_value", order=540, ) ) - response = await client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_required is True - + assert response.description == "description_value" assert response.order == 540 @@ -4521,11 +4339,12 @@ async def test_create_tag_template_field_async_from_dict(): def test_create_tag_template_field_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagTemplateFieldRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4533,7 +4352,6 @@ def test_create_tag_template_field_field_headers(): type(client.transport.create_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() - client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -4548,11 +4366,12 @@ def test_create_tag_template_field_field_headers(): @pytest.mark.asyncio async def test_create_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagTemplateFieldRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4562,7 +4381,6 @@ async def test_create_tag_template_field_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() ) - await client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -4576,7 +4394,7 @@ async def test_create_tag_template_field_field_headers_async(): def test_create_tag_template_field_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4584,7 +4402,6 @@ def test_create_tag_template_field_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_tag_template_field( @@ -4597,16 +4414,13 @@ def test_create_tag_template_field_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].tag_template_field_id == "tag_template_field_id_value" - assert args[0].tag_template_field == tags.TagTemplateField(name="name_value") def test_create_tag_template_field_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4621,7 +4435,7 @@ def test_create_tag_template_field_flattened_error(): @pytest.mark.asyncio async def test_create_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4645,17 +4459,14 @@ async def test_create_tag_template_field_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].tag_template_field_id == "tag_template_field_id_value" - assert args[0].tag_template_field == tags.TagTemplateField(name="name_value") @pytest.mark.asyncio async def test_create_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -4672,7 +4483,7 @@ def test_update_tag_template_field( transport: str = "grpc", request_type=datacatalog.UpdateTagTemplateFieldRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4688,27 +4499,22 @@ def test_update_tag_template_field( name="name_value", display_name="display_name_value", is_required=True, + description="description_value", order=540, ) - response = client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_required is True - + assert response.description == "description_value" assert response.order == 540 @@ -4720,7 +4526,7 @@ def test_update_tag_template_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4730,7 +4536,6 @@ def test_update_tag_template_field_empty_call(): client.update_tag_template_field() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() @@ -4740,7 +4545,7 @@ async def test_update_tag_template_field_async( request_type=datacatalog.UpdateTagTemplateFieldRequest, ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4757,27 +4562,23 @@ async def test_update_tag_template_field_async( name="name_value", display_name="display_name_value", is_required=True, + description="description_value", order=540, ) ) - response = await client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_required is True - + assert response.description == "description_value" assert response.order == 540 @@ -4787,11 +4588,12 @@ async def test_update_tag_template_field_async_from_dict(): def test_update_tag_template_field_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagTemplateFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4799,7 +4601,6 @@ def test_update_tag_template_field_field_headers(): type(client.transport.update_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() - client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -4814,11 +4615,12 @@ def test_update_tag_template_field_field_headers(): @pytest.mark.asyncio async def test_update_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagTemplateFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4828,7 +4630,6 @@ async def test_update_tag_template_field_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() ) - await client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -4842,7 +4643,7 @@ async def test_update_tag_template_field_field_headers_async(): def test_update_tag_template_field_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4850,29 +4651,25 @@ def test_update_tag_template_field_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_tag_template_field( name="name_value", tag_template_field=tags.TagTemplateField(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].tag_template_field == tags.TagTemplateField(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_tag_template_field_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4881,13 +4678,13 @@ def test_update_tag_template_field_flattened_error(): datacatalog.UpdateTagTemplateFieldRequest(), name="name_value", tag_template_field=tags.TagTemplateField(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -4904,24 +4701,21 @@ async def test_update_tag_template_field_flattened_async(): response = await client.update_tag_template_field( name="name_value", tag_template_field=tags.TagTemplateField(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].tag_template_field == tags.TagTemplateField(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -4930,7 +4724,7 @@ async def test_update_tag_template_field_flattened_error_async(): datacatalog.UpdateTagTemplateFieldRequest(), name="name_value", tag_template_field=tags.TagTemplateField(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -4938,7 +4732,7 @@ def test_rename_tag_template_field( transport: str = "grpc", request_type=datacatalog.RenameTagTemplateFieldRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4954,27 +4748,22 @@ def test_rename_tag_template_field( name="name_value", display_name="display_name_value", is_required=True, + description="description_value", order=540, ) - response = client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldRequest() # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_required is True - + assert response.description == "description_value" assert response.order == 540 @@ -4986,7 +4775,7 @@ def test_rename_tag_template_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4996,7 +4785,6 @@ def test_rename_tag_template_field_empty_call(): client.rename_tag_template_field() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldRequest() @@ -5006,7 +4794,7 @@ async def test_rename_tag_template_field_async( request_type=datacatalog.RenameTagTemplateFieldRequest, ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5023,27 +4811,23 @@ async def test_rename_tag_template_field_async( name="name_value", display_name="display_name_value", is_required=True, + description="description_value", order=540, ) ) - response = await client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_required is True - + assert response.description == "description_value" assert response.order == 540 @@ -5053,11 +4837,12 @@ async def test_rename_tag_template_field_async_from_dict(): def test_rename_tag_template_field_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.RenameTagTemplateFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5065,7 +4850,6 @@ def test_rename_tag_template_field_field_headers(): type(client.transport.rename_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() - client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -5080,11 +4864,12 @@ def test_rename_tag_template_field_field_headers(): @pytest.mark.asyncio async def test_rename_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.RenameTagTemplateFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5094,7 +4879,6 @@ async def test_rename_tag_template_field_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() ) - await client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -5108,7 +4892,7 @@ async def test_rename_tag_template_field_field_headers_async(): def test_rename_tag_template_field_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5116,7 +4900,6 @@ def test_rename_tag_template_field_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.rename_tag_template_field( @@ -5128,14 +4911,12 @@ def test_rename_tag_template_field_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].new_tag_template_field_id == "new_tag_template_field_id_value" def test_rename_tag_template_field_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5149,7 +4930,7 @@ def test_rename_tag_template_field_flattened_error(): @pytest.mark.asyncio async def test_rename_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5172,15 +4953,13 @@ async def test_rename_tag_template_field_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].new_tag_template_field_id == "new_tag_template_field_id_value" @pytest.mark.asyncio async def test_rename_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5192,11 +4971,259 @@ async def test_rename_tag_template_field_flattened_error_async(): ) +def test_rename_tag_template_field_enum_value( + transport: str = "grpc", + request_type=datacatalog.RenameTagTemplateFieldEnumValueRequest, +): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField( + name="name_value", + display_name="display_name_value", + is_required=True, + description="description_value", + order=540, + ) + response = client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.is_required is True + assert response.description == "description_value" + assert response.order == 540 + + +def test_rename_tag_template_field_enum_value_from_dict(): + test_rename_tag_template_field_enum_value(request_type=dict) + + +def test_rename_tag_template_field_enum_value_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + client.rename_tag_template_field_enum_value() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_async( + transport: str = "grpc_asyncio", + request_type=datacatalog.RenameTagTemplateFieldEnumValueRequest, +): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tags.TagTemplateField( + name="name_value", + display_name="display_name_value", + is_required=True, + description="description_value", + order=540, + ) + ) + response = await client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, tags.TagTemplateField) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.is_required is True + assert response.description == "description_value" + assert response.order == 540 + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_async_from_dict(): + await test_rename_tag_template_field_enum_value_async(request_type=dict) + + +def test_rename_tag_template_field_enum_value_field_headers(): + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RenameTagTemplateFieldEnumValueRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + call.return_value = tags.TagTemplateField() + client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_field_headers_async(): + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RenameTagTemplateFieldEnumValueRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tags.TagTemplateField() + ) + await client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_rename_tag_template_field_enum_value_flattened(): + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rename_tag_template_field_enum_value( + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + assert ( + args[0].new_enum_value_display_name == "new_enum_value_display_name_value" + ) + + +def test_rename_tag_template_field_enum_value_flattened_error(): + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.rename_tag_template_field_enum_value( + datacatalog.RenameTagTemplateFieldEnumValueRequest(), + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_flattened_async(): + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tags.TagTemplateField() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rename_tag_template_field_enum_value( + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + assert ( + args[0].new_enum_value_display_name == "new_enum_value_display_name_value" + ) + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_flattened_error_async(): + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.rename_tag_template_field_enum_value( + datacatalog.RenameTagTemplateFieldEnumValueRequest(), + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + def test_delete_tag_template_field( transport: str = "grpc", request_type=datacatalog.DeleteTagTemplateFieldRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5209,13 +5236,11 @@ def test_delete_tag_template_field( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() # Establish that the response is the type that we expect. @@ -5230,7 +5255,7 @@ def test_delete_tag_template_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5240,7 +5265,6 @@ def test_delete_tag_template_field_empty_call(): client.delete_tag_template_field() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() @@ -5250,7 +5274,7 @@ async def test_delete_tag_template_field_async( request_type=datacatalog.DeleteTagTemplateFieldRequest, ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5263,13 +5287,11 @@ async def test_delete_tag_template_field_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() # Establish that the response is the type that we expect. @@ -5282,11 +5304,12 @@ async def test_delete_tag_template_field_async_from_dict(): def test_delete_tag_template_field_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagTemplateFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5294,7 +5317,6 @@ def test_delete_tag_template_field_field_headers(): type(client.transport.delete_tag_template_field), "__call__" ) as call: call.return_value = None - client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. 
@@ -5309,11 +5331,12 @@ def test_delete_tag_template_field_field_headers(): @pytest.mark.asyncio async def test_delete_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagTemplateFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5321,7 +5344,6 @@ async def test_delete_tag_template_field_field_headers_async(): type(client.transport.delete_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -5335,7 +5357,7 @@ async def test_delete_tag_template_field_field_headers_async(): def test_delete_tag_template_field_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5343,7 +5365,6 @@ def test_delete_tag_template_field_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_tag_template_field( @@ -5354,14 +5375,12 @@ def test_delete_tag_template_field_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].force == True def test_delete_tag_template_field_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5373,7 +5392,7 @@ def test_delete_tag_template_field_flattened_error(): @pytest.mark.asyncio async def test_delete_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5393,15 +5412,13 @@ async def test_delete_tag_template_field_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].force == True @pytest.mark.asyncio async def test_delete_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5413,7 +5430,7 @@ async def test_delete_tag_template_field_flattened_error_async(): def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagRequest): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5429,23 +5446,17 @@ def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagR template_display_name="template_display_name_value", column="column_value", ) - response = client.create_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagRequest() # Establish that the response is the type that we expect. - assert isinstance(response, tags.Tag) - assert response.name == "name_value" - assert response.template == "template_value" - assert response.template_display_name == "template_display_name_value" @@ -5457,7 +5468,7 @@ def test_create_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5465,7 +5476,6 @@ def test_create_tag_empty_call(): client.create_tag() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagRequest() @@ -5474,7 +5484,7 @@ async def test_create_tag_async( transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5491,22 +5501,17 @@ async def test_create_tag_async( template_display_name="template_display_name_value", ) ) - response = await client.create_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.Tag) - assert response.name == "name_value" - assert response.template == "template_value" - assert response.template_display_name == "template_display_name_value" @@ -5516,17 +5521,17 @@ async def test_create_tag_async_from_dict(): def test_create_tag_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: call.return_value = tags.Tag() - client.create_tag(request) # Establish that the underlying gRPC stub method was called. 
@@ -5541,17 +5546,17 @@ def test_create_tag_field_headers(): @pytest.mark.asyncio async def test_create_tag_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) - await client.create_tag(request) # Establish that the underlying gRPC stub method was called. @@ -5565,13 +5570,12 @@ async def test_create_tag_field_headers_async(): def test_create_tag_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_tag( @@ -5582,14 +5586,12 @@ def test_create_tag_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].tag == tags.Tag(name="name_value") def test_create_tag_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5603,7 +5605,7 @@ def test_create_tag_flattened_error(): @pytest.mark.asyncio async def test_create_tag_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: @@ -5621,15 +5623,13 @@ async def test_create_tag_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].tag == tags.Tag(name="name_value") @pytest.mark.asyncio async def test_create_tag_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5643,7 +5643,7 @@ async def test_create_tag_flattened_error_async(): def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagRequest): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5659,23 +5659,17 @@ def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagR template_display_name="template_display_name_value", column="column_value", ) - response = client.update_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, tags.Tag) - assert response.name == "name_value" - assert response.template == "template_value" - assert response.template_display_name == "template_display_name_value" @@ -5687,7 +5681,7 @@ def test_update_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5695,7 +5689,6 @@ def test_update_tag_empty_call(): client.update_tag() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagRequest() @@ -5704,7 +5697,7 @@ async def test_update_tag_async( transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5721,22 +5714,17 @@ async def test_update_tag_async( template_display_name="template_display_name_value", ) ) - response = await client.update_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, tags.Tag) - assert response.name == "name_value" - assert response.template == "template_value" - assert response.template_display_name == "template_display_name_value" @@ -5746,17 +5734,17 @@ async def test_update_tag_async_from_dict(): def test_update_tag_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagRequest() + request.tag.name = "tag.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: call.return_value = tags.Tag() - client.update_tag(request) # Establish that the underlying gRPC stub method was called. @@ -5771,17 +5759,17 @@ def test_update_tag_field_headers(): @pytest.mark.asyncio async def test_update_tag_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagRequest() + request.tag.name = "tag.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) - await client.update_tag(request) # Establish that the underlying gRPC stub method was called. 
@@ -5795,32 +5783,29 @@ async def test_update_tag_field_headers_async(): def test_update_tag_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_tag( tag=tags.Tag(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].tag == tags.Tag(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_tag_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5828,13 +5813,13 @@ def test_update_tag_flattened_error(): client.update_tag( datacatalog.UpdateTagRequest(), tag=tags.Tag(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_tag_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: @@ -5846,22 +5831,20 @@ async def test_update_tag_flattened_async(): # using the keyword arguments to the method. response = await client.update_tag( tag=tags.Tag(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].tag == tags.Tag(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_tag_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5869,13 +5852,13 @@ async def test_update_tag_flattened_error_async(): await client.update_tag( datacatalog.UpdateTagRequest(), tag=tags.Tag(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_delete_tag(transport: str = "grpc", request_type=datacatalog.DeleteTagRequest): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5886,13 +5869,11 @@ def test_delete_tag(transport: str = "grpc", request_type=datacatalog.DeleteTagR with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagRequest() # Establish that the response is the type that we expect. @@ -5907,7 +5888,7 @@ def test_delete_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5915,7 +5896,6 @@ def test_delete_tag_empty_call(): client.delete_tag() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagRequest() @@ -5924,7 +5904,7 @@ async def test_delete_tag_async( transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5935,13 +5915,11 @@ async def test_delete_tag_async( with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagRequest() # Establish that the response is the type that we expect. @@ -5954,17 +5932,17 @@ async def test_delete_tag_async_from_dict(): def test_delete_tag_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: call.return_value = None - client.delete_tag(request) # Establish that the underlying gRPC stub method was called. 
@@ -5979,17 +5957,17 @@ def test_delete_tag_field_headers(): @pytest.mark.asyncio async def test_delete_tag_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. @@ -6003,13 +5981,12 @@ async def test_delete_tag_field_headers_async(): def test_delete_tag_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_tag(name="name_value",) @@ -6018,12 +5995,11 @@ def test_delete_tag_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_tag_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -6035,7 +6011,7 @@ def test_delete_tag_flattened_error(): @pytest.mark.asyncio async def test_delete_tag_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: @@ -6051,13 +6027,12 @@ async def test_delete_tag_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_tag_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -6069,7 +6044,7 @@ async def test_delete_tag_flattened_error_async(): def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsRequest): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6082,19 +6057,15 @@ def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsReq call.return_value = datacatalog.ListTagsResponse( next_page_token="next_page_token_value", ) - response = client.list_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListTagsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTagsPager) - assert response.next_page_token == "next_page_token_value" @@ -6106,7 +6077,7 @@ def test_list_tags_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6114,7 +6085,6 @@ def test_list_tags_empty_call(): client.list_tags() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListTagsRequest() @@ -6123,7 +6093,7 @@ async def test_list_tags_async( transport: str = "grpc_asyncio", request_type=datacatalog.ListTagsRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6136,18 +6106,15 @@ async def test_list_tags_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListTagsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListTagsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListTagsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -6157,17 +6124,17 @@ async def test_list_tags_async_from_dict(): def test_list_tags_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListTagsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: call.return_value = datacatalog.ListTagsResponse() - client.list_tags(request) # Establish that the underlying gRPC stub method was called. @@ -6182,11 +6149,12 @@ def test_list_tags_field_headers(): @pytest.mark.asyncio async def test_list_tags_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListTagsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -6194,7 +6162,6 @@ async def test_list_tags_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListTagsResponse() ) - await client.list_tags(request) # Establish that the underlying gRPC stub method was called. @@ -6208,13 +6175,12 @@ async def test_list_tags_field_headers_async(): def test_list_tags_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListTagsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_tags(parent="parent_value",) @@ -6223,12 +6189,11 @@ def test_list_tags_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_tags_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -6240,7 +6205,7 @@ def test_list_tags_flattened_error(): @pytest.mark.asyncio async def test_list_tags_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: @@ -6258,13 +6223,12 @@ async def test_list_tags_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_tags_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -6275,7 +6239,7 @@ async def test_list_tags_flattened_error_async(): def test_list_tags_pager(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: @@ -6304,7 +6268,7 @@ def test_list_tags_pager(): def test_list_tags_pages(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: @@ -6325,7 +6289,7 @@ def test_list_tags_pages(): @pytest.mark.asyncio async def test_list_tags_async_pager(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6353,7 +6317,7 @@ async def test_list_tags_async_pager(): @pytest.mark.asyncio async def test_list_tags_async_pages(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -6377,10 +6341,10 @@ async def test_list_tags_async_pages(): def test_set_iam_policy( - transport: str = "grpc", request_type=iam_policy.SetIamPolicyRequest + transport: str = "grpc", request_type=iam_policy_pb2.SetIamPolicyRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6390,22 +6354,17 @@ def test_set_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) - + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.SetIamPolicyRequest() + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() # Establish that the response is the type that we expect. - - assert isinstance(response, policy.Policy) - + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 - assert response.etag == b"etag_blob" @@ -6417,7 +6376,7 @@ def test_set_iam_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6425,16 +6384,15 @@ def test_set_iam_policy_empty_call(): client.set_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.SetIamPolicyRequest() + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() @pytest.mark.asyncio async def test_set_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6445,22 +6403,18 @@ async def test_set_iam_policy_async( with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) - response = await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.SetIamPolicyRequest() + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) - + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 - assert response.etag == b"etag_blob" @@ -6470,17 +6424,17 @@ async def test_set_iam_policy_async_from_dict(): def test_set_iam_policy_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -6495,17 +6449,17 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) - + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -6519,29 +6473,27 @@ async def test_set_iam_policy_field_headers_async(): def test_set_iam_policy_from_dict_foreign(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() response = client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy": policy_pb2.Policy(version=774), } ) call.assert_called() def test_set_iam_policy_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.set_iam_policy(resource="resource_value",) @@ -6550,31 +6502,30 @@ def test_set_iam_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].resource == "resource_value" def test_set_iam_policy_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.set_iam_policy( - iam_policy.SetIamPolicyRequest(), resource="resource_value", + iam_policy_pb2.SetIamPolicyRequest(), resource="resource_value", ) @pytest.mark.asyncio async def test_set_iam_policy_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.set_iam_policy(resource="resource_value",) @@ -6583,27 +6534,26 @@ async def test_set_iam_policy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].resource == "resource_value" @pytest.mark.asyncio async def test_set_iam_policy_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.set_iam_policy( - iam_policy.SetIamPolicyRequest(), resource="resource_value", + iam_policy_pb2.SetIamPolicyRequest(), resource="resource_value", ) def test_get_iam_policy( - transport: str = "grpc", request_type=iam_policy.GetIamPolicyRequest + transport: str = "grpc", request_type=iam_policy_pb2.GetIamPolicyRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6613,22 +6563,17 @@ def test_get_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy(version=774, etag=b"etag_blob",) - + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.GetIamPolicyRequest() + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() # Establish that the response is the type that we expect. - - assert isinstance(response, policy.Policy) - + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 - assert response.etag == b"etag_blob" @@ -6640,7 +6585,7 @@ def test_get_iam_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6648,16 +6593,15 @@ def test_get_iam_policy_empty_call(): client.get_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.GetIamPolicyRequest() + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() @pytest.mark.asyncio async def test_get_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6668,22 +6612,18 @@ async def test_get_iam_policy_async( with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) - response = await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.GetIamPolicyRequest() + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) - + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 - assert response.etag == b"etag_blob" @@ -6693,17 +6633,17 @@ async def test_get_iam_policy_async_from_dict(): def test_get_iam_policy_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -6718,17 +6658,17 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) - + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -6742,29 +6682,27 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict_foreign(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() response = client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() def test_get_iam_policy_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_iam_policy(resource="resource_value",) @@ -6773,31 +6711,30 @@ def test_get_iam_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].resource == "resource_value" def test_get_iam_policy_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_iam_policy( - iam_policy.GetIamPolicyRequest(), resource="resource_value", + iam_policy_pb2.GetIamPolicyRequest(), resource="resource_value", ) @pytest.mark.asyncio async def test_get_iam_policy_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_iam_policy(resource="resource_value",) @@ -6806,27 +6743,26 @@ async def test_get_iam_policy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].resource == "resource_value" @pytest.mark.asyncio async def test_get_iam_policy_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_iam_policy( - iam_policy.GetIamPolicyRequest(), resource="resource_value", + iam_policy_pb2.GetIamPolicyRequest(), resource="resource_value", ) def test_test_iam_permissions( - transport: str = "grpc", request_type=iam_policy.TestIamPermissionsRequest + transport: str = "grpc", request_type=iam_policy_pb2.TestIamPermissionsRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6838,22 +6774,18 @@ def test_test_iam_permissions( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = iam_policy.TestIamPermissionsResponse( + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( permissions=["permissions_value"], ) - response = client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.TestIamPermissionsRequest() + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() # Establish that the response is the type that we expect. - - assert isinstance(response, iam_policy.TestIamPermissionsResponse) - + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -6865,7 +6797,7 @@ def test_test_iam_permissions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6875,16 +6807,16 @@ def test_test_iam_permissions_empty_call(): client.test_iam_permissions() call.assert_called() _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.TestIamPermissionsRequest() + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() @pytest.mark.asyncio async def test_test_iam_permissions_async( - transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest + transport: str = "grpc_asyncio", + request_type=iam_policy_pb2.TestIamPermissionsRequest, ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6897,20 +6829,19 @@ async def test_test_iam_permissions_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) ) - response = await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.TestIamPermissionsRequest() + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy.TestIamPermissionsResponse) - + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -6920,19 +6851,19 @@ async def test_test_iam_permissions_async_from_dict(): def test_test_iam_permissions_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: - call.return_value = iam_policy.TestIamPermissionsResponse() - + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. @@ -6947,11 +6878,12 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6959,9 +6891,8 @@ async def test_test_iam_permissions_field_headers_async(): type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse() + iam_policy_pb2.TestIamPermissionsResponse() ) - await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. @@ -6975,14 +6906,13 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict_foreign(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy.TestIamPermissionsResponse() - + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() response = client.test_iam_permissions( request={ "resource": "resource_value", @@ -6995,16 +6925,16 @@ def test_test_iam_permissions_from_dict_foreign(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.DataCatalogGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.DataCatalogGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DataCatalogClient( @@ -7014,7 +6944,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.DataCatalogGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DataCatalogClient( @@ -7025,7 +6955,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.DataCatalogGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = DataCatalogClient(transport=transport) assert client.transport is transport @@ -7034,13 +6964,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.DataCatalogGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.DataCatalogGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -7052,23 +6982,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.DataCatalogGrpcTransport,) def test_data_catalog_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.DataCatalogTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -7080,7 +7010,7 @@ def test_data_catalog_base_transport(): ) as Transport: Transport.return_value = None transport = transports.DataCatalogTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -7105,6 +7035,7 @@ def test_data_catalog_base_transport(): "create_tag_template_field", "update_tag_template_field", "rename_tag_template_field", + "rename_tag_template_field_enum_value", "delete_tag_template_field", "create_tag", "update_tag", @@ -7119,15 +7050,37 @@ def test_data_catalog_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_data_catalog_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, 
mock.patch( + "google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataCatalogTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_data_catalog_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DataCatalogTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -7140,19 +7093,33 @@ def test_data_catalog_base_transport_with_credentials_file(): def test_data_catalog_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DataCatalogTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_data_catalog_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataCatalogClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_data_catalog_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) DataCatalogClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -7160,26 +7127,156 @@ def test_data_catalog_auth_adc(): ) -def test_data_catalog_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport,], +) +@requires_google_auth_gte_1_25_0 +def test_data_catalog_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.DataCatalogGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport,], +) +@requires_google_auth_lt_1_25_0 +def test_data_catalog_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataCatalogGrpcTransport, grpc_helpers), + (transports.DataCatalogGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_data_catalog_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataCatalogGrpcTransport, grpc_helpers), + (transports.DataCatalogGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_data_catalog_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataCatalogGrpcTransport, grpc_helpers), + (transports.DataCatalogGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_data_catalog_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport], ) def test_data_catalog_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -7218,7 +7315,7 @@ def test_data_catalog_grpc_transport_client_cert_source_for_mtls(transport_class def test_data_catalog_host_no_port(): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datacatalog.googleapis.com" ), @@ -7228,7 +7325,7 @@ def test_data_catalog_host_no_port(): def test_data_catalog_host_with_port(): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datacatalog.googleapis.com:8000" ), @@ -7279,9 +7376,9 @@ def test_data_catalog_transport_channel_mtls_with_client_cert_source(transport_c mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = 
ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -7357,7 +7454,6 @@ def test_entry_path(): location = "clam" entry_group = "whelk" entry = "octopus" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format( project=project, location=location, entry_group=entry_group, entry=entry, ) @@ -7383,7 +7479,6 @@ def test_entry_group_path(): project = "winkle" location = "nautilus" entry_group = "scallop" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format( project=project, location=location, entry_group=entry_group, ) @@ -7410,7 +7505,6 @@ def test_tag_path(): entry_group = "oyster" entry = "nudibranch" tag = "cuttlefish" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format( project=project, location=location, @@ -7441,7 +7535,6 @@ def test_tag_template_path(): project = "squid" location = "clam" tag_template = "whelk" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format( project=project, location=location, tag_template=tag_template, ) @@ -7467,7 +7560,6 @@ def test_tag_template_field_path(): location = "mussel" tag_template = "winkle" field = "nautilus" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format( project=project, location=location, tag_template=tag_template, field=field, ) @@ -7491,9 +7583,42 @@ def test_parse_tag_template_field_path(): assert expected == actual -def test_common_billing_account_path(): - billing_account = "whelk" +def test_tag_template_field_enum_value_path(): + project = "whelk" + location = "octopus" + tag_template = "oyster" + tag_template_field_id = "nudibranch" + enum_value_display_name = 
"cuttlefish" + expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}".format( + project=project, + location=location, + tag_template=tag_template, + tag_template_field_id=tag_template_field_id, + enum_value_display_name=enum_value_display_name, + ) + actual = DataCatalogClient.tag_template_field_enum_value_path( + project, location, tag_template, tag_template_field_id, enum_value_display_name + ) + assert expected == actual + + +def test_parse_tag_template_field_enum_value_path(): + expected = { + "project": "mussel", + "location": "winkle", + "tag_template": "nautilus", + "tag_template_field_id": "scallop", + "enum_value_display_name": "abalone", + } + path = DataCatalogClient.tag_template_field_enum_value_path(**expected) + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_tag_template_field_enum_value_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -7503,7 +7628,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "clam", } path = DataCatalogClient.common_billing_account_path(**expected) @@ -7513,8 +7638,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" - + folder = "whelk" expected = "folders/{folder}".format(folder=folder,) actual = DataCatalogClient.common_folder_path(folder) assert expected == actual @@ -7522,7 +7646,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "octopus", } path = DataCatalogClient.common_folder_path(**expected) @@ -7532,8 +7656,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = 
"cuttlefish" - + organization = "oyster" expected = "organizations/{organization}".format(organization=organization,) actual = DataCatalogClient.common_organization_path(organization) assert expected == actual @@ -7541,7 +7664,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "nudibranch", } path = DataCatalogClient.common_organization_path(**expected) @@ -7551,8 +7674,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" - + project = "cuttlefish" expected = "projects/{project}".format(project=project,) actual = DataCatalogClient.common_project_path(project) assert expected == actual @@ -7560,7 +7682,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "mussel", } path = DataCatalogClient.common_project_path(**expected) @@ -7570,9 +7692,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" - + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -7582,8 +7703,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "scallop", + "location": "abalone", } path = DataCatalogClient.common_location_path(**expected) @@ -7599,7 +7720,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.DataCatalogTransport, "_prep_wrapped_messages" ) as prep: client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -7608,6 +7729,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = DataCatalogClient.get_transport_class() transport = 
transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py b/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py new file mode 100644 index 00000000..0b80f8dc --- /dev/null +++ b/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py @@ -0,0 +1,4213 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datacatalog_v1.services.policy_tag_manager import ( + PolicyTagManagerAsyncClient, +) +from google.cloud.datacatalog_v1.services.policy_tag_manager import ( + PolicyTagManagerClient, +) +from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers +from google.cloud.datacatalog_v1.services.policy_tag_manager import transports +from google.cloud.datacatalog_v1.services.policy_tag_manager.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.datacatalog_v1.services.policy_tag_manager.transports.base import ( + _GOOGLE_AUTH_VERSION, +) +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import timestamps +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PolicyTagManagerClient._get_default_mtls_endpoint(None) is None + assert ( + PolicyTagManagerClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + PolicyTagManagerClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PolicyTagManagerClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PolicyTagManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PolicyTagManagerClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [PolicyTagManagerClient, PolicyTagManagerAsyncClient,] +) +def test_policy_tag_manager_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "datacatalog.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [PolicyTagManagerClient, PolicyTagManagerAsyncClient,] +) +def test_policy_tag_manager_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as 
factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "datacatalog.googleapis.com:443" + + +def test_policy_tag_manager_client_get_transport_class(): + transport = PolicyTagManagerClient.get_transport_class() + available_transports = [ + transports.PolicyTagManagerGrpcTransport, + ] + assert transport in available_transports + + transport = PolicyTagManagerClient.get_transport_class("grpc") + assert transport == transports.PolicyTagManagerGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), + ( + PolicyTagManagerAsyncClient, + transports.PolicyTagManagerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + PolicyTagManagerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerClient), +) +@mock.patch.object( + PolicyTagManagerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerAsyncClient), +) +def test_policy_tag_manager_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PolicyTagManagerClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
+ with mock.patch.object(PolicyTagManagerClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PolicyTagManagerClient, + transports.PolicyTagManagerGrpcTransport, + "grpc", + "true", + ), + ( + PolicyTagManagerAsyncClient, + transports.PolicyTagManagerGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PolicyTagManagerClient, + transports.PolicyTagManagerGrpcTransport, + "grpc", + "false", + ), + ( + PolicyTagManagerAsyncClient, + transports.PolicyTagManagerGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + PolicyTagManagerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerClient), +) +@mock.patch.object( + PolicyTagManagerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_policy_tag_manager_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), + ( + PolicyTagManagerAsyncClient, + transports.PolicyTagManagerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_policy_tag_manager_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), + ( + PolicyTagManagerAsyncClient, + transports.PolicyTagManagerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_policy_tag_manager_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_policy_tag_manager_client_client_options_from_dict(): + with mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PolicyTagManagerClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_create_taxonomy( + transport: str = "grpc", request_type=policytagmanager.CreateTaxonomyRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policytagmanager.Taxonomy( + name="name_value", + display_name="display_name_value", + description="description_value", + policy_tag_count=1715, + activated_policy_types=[ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ], + ) + response = client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreateTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ] + + +def test_create_taxonomy_from_dict(): + test_create_taxonomy(request_type=dict) + + +def test_create_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + client.create_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreateTaxonomyRequest() + + +@pytest.mark.asyncio +async def test_create_taxonomy_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.CreateTaxonomyRequest +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy( + name="name_value", + display_name="display_name_value", + description="description_value", + policy_tag_count=1715, + activated_policy_types=[ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ], + ) + ) + response = await client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreateTaxonomyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ] + + +@pytest.mark.asyncio +async def test_create_taxonomy_async_from_dict(): + await test_create_taxonomy_async(request_type=dict) + + +def test_create_taxonomy_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreateTaxonomyRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + call.return_value = policytagmanager.Taxonomy() + client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreateTaxonomyRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy() + ) + await client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_taxonomy_flattened(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_taxonomy( + parent="parent_value", + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].taxonomy == policytagmanager.Taxonomy(name="name_value") + + +def test_create_taxonomy_flattened_error(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_taxonomy( + policytagmanager.CreateTaxonomyRequest(), + parent="parent_value", + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_taxonomy( + parent="parent_value", + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].taxonomy == policytagmanager.Taxonomy(name="name_value") + + +@pytest.mark.asyncio +async def test_create_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_taxonomy( + policytagmanager.CreateTaxonomyRequest(), + parent="parent_value", + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + +def test_delete_taxonomy( + transport: str = "grpc", request_type=policytagmanager.DeleteTaxonomyRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeleteTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_taxonomy_from_dict(): + test_delete_taxonomy(request_type=dict) + + +def test_delete_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + client.delete_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeleteTaxonomyRequest() + + +@pytest.mark.asyncio +async def test_delete_taxonomy_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.DeleteTaxonomyRequest +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeleteTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_taxonomy_async_from_dict(): + await test_delete_taxonomy_async(request_type=dict) + + +def test_delete_taxonomy_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeleteTaxonomyRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + call.return_value = None + client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeleteTaxonomyRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_taxonomy_flattened(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_taxonomy(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_delete_taxonomy_flattened_error(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_taxonomy( + policytagmanager.DeleteTaxonomyRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_taxonomy(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_taxonomy( + policytagmanager.DeleteTaxonomyRequest(), name="name_value", + ) + + +def test_update_taxonomy( + transport: str = "grpc", request_type=policytagmanager.UpdateTaxonomyRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy( + name="name_value", + display_name="display_name_value", + description="description_value", + policy_tag_count=1715, + activated_policy_types=[ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ], + ) + response = client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdateTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ] + + +def test_update_taxonomy_from_dict(): + test_update_taxonomy(request_type=dict) + + +def test_update_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + client.update_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdateTaxonomyRequest() + + +@pytest.mark.asyncio +async def test_update_taxonomy_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.UpdateTaxonomyRequest +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy( + name="name_value", + display_name="display_name_value", + description="description_value", + policy_tag_count=1715, + activated_policy_types=[ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ], + ) + ) + response = await client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdateTaxonomyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ] + + +@pytest.mark.asyncio +async def test_update_taxonomy_async_from_dict(): + await test_update_taxonomy_async(request_type=dict) + + +def test_update_taxonomy_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdateTaxonomyRequest() + + request.taxonomy.name = "taxonomy.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + call.return_value = policytagmanager.Taxonomy() + client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "taxonomy.name=taxonomy.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdateTaxonomyRequest() + + request.taxonomy.name = "taxonomy.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy() + ) + await client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "taxonomy.name=taxonomy.name/value",) in kw[ + "metadata" + ] + + +def test_update_taxonomy_flattened(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_taxonomy(taxonomy=policytagmanager.Taxonomy(name="name_value"),) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].taxonomy == policytagmanager.Taxonomy(name="name_value") + + +def test_update_taxonomy_flattened_error(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_taxonomy( + policytagmanager.UpdateTaxonomyRequest(), + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_update_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_taxonomy( + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].taxonomy == policytagmanager.Taxonomy(name="name_value") + + +@pytest.mark.asyncio +async def test_update_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_taxonomy( + policytagmanager.UpdateTaxonomyRequest(), + taxonomy=policytagmanager.Taxonomy(name="name_value"), + ) + + +def test_list_taxonomies( + transport: str = "grpc", request_type=policytagmanager.ListTaxonomiesRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListTaxonomiesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTaxonomiesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_taxonomies_from_dict(): + test_list_taxonomies(request_type=dict) + + +def test_list_taxonomies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + client.list_taxonomies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListTaxonomiesRequest() + + +@pytest.mark.asyncio +async def test_list_taxonomies_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.ListTaxonomiesRequest +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.ListTaxonomiesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTaxonomiesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_taxonomies_async_from_dict(): + await test_list_taxonomies_async(request_type=dict) + + +def test_list_taxonomies_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListTaxonomiesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + call.return_value = policytagmanager.ListTaxonomiesResponse() + client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_taxonomies_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListTaxonomiesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.ListTaxonomiesResponse() + ) + await client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_taxonomies_flattened(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListTaxonomiesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_taxonomies(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +def test_list_taxonomies_flattened_error(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_taxonomies( + policytagmanager.ListTaxonomiesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_taxonomies_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListTaxonomiesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.ListTaxonomiesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_taxonomies(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_taxonomies_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_taxonomies( + policytagmanager.ListTaxonomiesRequest(), parent="parent_value", + ) + + +def test_list_taxonomies_pager(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token="abc", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], next_page_token="def", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(),], next_page_token="ghi", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(), policytagmanager.Taxonomy(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_taxonomies(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, policytagmanager.Taxonomy) for i in results) + + +def test_list_taxonomies_pages(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token="abc", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], next_page_token="def", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(),], next_page_token="ghi", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(), policytagmanager.Taxonomy(),], + ), + RuntimeError, + ) + pages = list(client.list_taxonomies(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_taxonomies_async_pager(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token="abc", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], next_page_token="def", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(),], next_page_token="ghi", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(), policytagmanager.Taxonomy(),], + ), + RuntimeError, + ) + async_pager = await client.list_taxonomies(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, policytagmanager.Taxonomy) for i in responses) + + +@pytest.mark.asyncio +async def test_list_taxonomies_async_pages(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token="abc", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], next_page_token="def", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(),], next_page_token="ghi", + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[policytagmanager.Taxonomy(), policytagmanager.Taxonomy(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_taxonomies(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_taxonomy( + transport: str = "grpc", request_type=policytagmanager.GetTaxonomyRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy( + name="name_value", + display_name="display_name_value", + description="description_value", + policy_tag_count=1715, + activated_policy_types=[ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ], + ) + response = client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetTaxonomyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ] + + +def test_get_taxonomy_from_dict(): + test_get_taxonomy(request_type=dict) + + +def test_get_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + client.get_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetTaxonomyRequest() + + +@pytest.mark.asyncio +async def test_get_taxonomy_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.GetTaxonomyRequest +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy( + name="name_value", + display_name="display_name_value", + description="description_value", + policy_tag_count=1715, + activated_policy_types=[ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ], + ) + ) + response = await client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [ + policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL + ] + + +@pytest.mark.asyncio +async def test_get_taxonomy_async_from_dict(): + await test_get_taxonomy_async(request_type=dict) + + +def test_get_taxonomy_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetTaxonomyRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + call.return_value = policytagmanager.Taxonomy() + client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetTaxonomyRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy() + ) + await client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_taxonomy_flattened(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_taxonomy(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_get_taxonomy_flattened_error(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_taxonomy( + policytagmanager.GetTaxonomyRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.Taxonomy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_taxonomy(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_taxonomy( + policytagmanager.GetTaxonomyRequest(), name="name_value", + ) + + +def test_create_policy_tag( + transport: str = "grpc", request_type=policytagmanager.CreatePolicyTagRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag( + name="name_value", + display_name="display_name_value", + description="description_value", + parent_policy_tag="parent_policy_tag_value", + child_policy_tags=["child_policy_tags_value"], + ) + response = client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreatePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent_policy_tag == "parent_policy_tag_value" + assert response.child_policy_tags == ["child_policy_tags_value"] + + +def test_create_policy_tag_from_dict(): + test_create_policy_tag(request_type=dict) + + +def test_create_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + client.create_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreatePolicyTagRequest() + + +@pytest.mark.asyncio +async def test_create_policy_tag_async( + transport: str = "grpc_asyncio", + request_type=policytagmanager.CreatePolicyTagRequest, +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag( + name="name_value", + display_name="display_name_value", + description="description_value", + parent_policy_tag="parent_policy_tag_value", + child_policy_tags=["child_policy_tags_value"], + ) + ) + response = await client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreatePolicyTagRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent_policy_tag == "parent_policy_tag_value" + assert response.child_policy_tags == ["child_policy_tags_value"] + + +@pytest.mark.asyncio +async def test_create_policy_tag_async_from_dict(): + await test_create_policy_tag_async(request_type=dict) + + +def test_create_policy_tag_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreatePolicyTagRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + call.return_value = policytagmanager.PolicyTag() + client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreatePolicyTagRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag() + ) + await client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_policy_tag_flattened(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_policy_tag( + parent="parent_value", + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].policy_tag == policytagmanager.PolicyTag(name="name_value") + + +def test_create_policy_tag_flattened_error(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_policy_tag( + policytagmanager.CreatePolicyTagRequest(), + parent="parent_value", + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_policy_tag( + parent="parent_value", + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].policy_tag == policytagmanager.PolicyTag(name="name_value") + + +@pytest.mark.asyncio +async def test_create_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_policy_tag( + policytagmanager.CreatePolicyTagRequest(), + parent="parent_value", + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + +def test_delete_policy_tag( + transport: str = "grpc", request_type=policytagmanager.DeletePolicyTagRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeletePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_policy_tag_from_dict(): + test_delete_policy_tag(request_type=dict) + + +def test_delete_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + client.delete_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeletePolicyTagRequest() + + +@pytest.mark.asyncio +async def test_delete_policy_tag_async( + transport: str = "grpc_asyncio", + request_type=policytagmanager.DeletePolicyTagRequest, +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeletePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_policy_tag_async_from_dict(): + await test_delete_policy_tag_async(request_type=dict) + + +def test_delete_policy_tag_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeletePolicyTagRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + call.return_value = None + client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeletePolicyTagRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_policy_tag_flattened(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_policy_tag(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_delete_policy_tag_flattened_error(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_policy_tag( + policytagmanager.DeletePolicyTagRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_policy_tag(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_policy_tag( + policytagmanager.DeletePolicyTagRequest(), name="name_value", + ) + + +def test_update_policy_tag( + transport: str = "grpc", request_type=policytagmanager.UpdatePolicyTagRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag( + name="name_value", + display_name="display_name_value", + description="description_value", + parent_policy_tag="parent_policy_tag_value", + child_policy_tags=["child_policy_tags_value"], + ) + response = client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdatePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent_policy_tag == "parent_policy_tag_value" + assert response.child_policy_tags == ["child_policy_tags_value"] + + +def test_update_policy_tag_from_dict(): + test_update_policy_tag(request_type=dict) + + +def test_update_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + client.update_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdatePolicyTagRequest() + + +@pytest.mark.asyncio +async def test_update_policy_tag_async( + transport: str = "grpc_asyncio", + request_type=policytagmanager.UpdatePolicyTagRequest, +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag( + name="name_value", + display_name="display_name_value", + description="description_value", + parent_policy_tag="parent_policy_tag_value", + child_policy_tags=["child_policy_tags_value"], + ) + ) + response = await client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdatePolicyTagRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent_policy_tag == "parent_policy_tag_value" + assert response.child_policy_tags == ["child_policy_tags_value"] + + +@pytest.mark.asyncio +async def test_update_policy_tag_async_from_dict(): + await test_update_policy_tag_async(request_type=dict) + + +def test_update_policy_tag_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdatePolicyTagRequest() + + request.policy_tag.name = "policy_tag.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + call.return_value = policytagmanager.PolicyTag() + client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "policy_tag.name=policy_tag.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdatePolicyTagRequest() + + request.policy_tag.name = "policy_tag.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag() + ) + await client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "policy_tag.name=policy_tag.name/value",) in kw[ + "metadata" + ] + + +def test_update_policy_tag_flattened(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_policy_tag( + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].policy_tag == policytagmanager.PolicyTag(name="name_value") + + +def test_update_policy_tag_flattened_error(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_policy_tag( + policytagmanager.UpdatePolicyTagRequest(), + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_update_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_policy_tag( + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].policy_tag == policytagmanager.PolicyTag(name="name_value") + + +@pytest.mark.asyncio +async def test_update_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_policy_tag( + policytagmanager.UpdatePolicyTagRequest(), + policy_tag=policytagmanager.PolicyTag(name="name_value"), + ) + + +def test_list_policy_tags( + transport: str = "grpc", request_type=policytagmanager.ListPolicyTagsRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListPolicyTagsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListPolicyTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPolicyTagsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_policy_tags_from_dict(): + test_list_policy_tags(request_type=dict) + + +def test_list_policy_tags_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + client.list_policy_tags() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListPolicyTagsRequest() + + +@pytest.mark.asyncio +async def test_list_policy_tags_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.ListPolicyTagsRequest +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.ListPolicyTagsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListPolicyTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPolicyTagsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_policy_tags_async_from_dict(): + await test_list_policy_tags_async(request_type=dict) + + +def test_list_policy_tags_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = policytagmanager.ListPolicyTagsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + call.return_value = policytagmanager.ListPolicyTagsResponse() + client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_policy_tags_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListPolicyTagsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.ListPolicyTagsResponse() + ) + await client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_policy_tags_flattened(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListPolicyTagsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_policy_tags(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +def test_list_policy_tags_flattened_error(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_policy_tags( + policytagmanager.ListPolicyTagsRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_policy_tags_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListPolicyTagsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.ListPolicyTagsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_policy_tags(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+    assert len(call.mock_calls)
+    _, args, _ = call.mock_calls[0]
+    assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_policy_tags_flattened_error_async():
+    client = PolicyTagManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_policy_tags(
+            policytagmanager.ListPolicyTagsRequest(), parent="parent_value",
+        )
+
+
+def test_list_policy_tags_pager():
+    client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                ],
+                next_page_token="abc",
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[], next_page_token="def",
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[policytagmanager.PolicyTag(),], next_page_token="ghi",
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_policy_tags(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, policytagmanager.PolicyTag) for i in results)
+
+
+def test_list_policy_tags_pages():
+    client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                ],
+                next_page_token="abc",
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[], next_page_token="def",
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[policytagmanager.PolicyTag(),], next_page_token="ghi",
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_policy_tags(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_policy_tags_async_pager():
+    client = PolicyTagManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_policy_tags), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                ],
+                next_page_token="abc",
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[], next_page_token="def",
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[policytagmanager.PolicyTag(),], next_page_token="ghi",
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_policy_tags(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, policytagmanager.PolicyTag) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_policy_tags_async_pages():
+    client = PolicyTagManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_policy_tags), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + next_page_token="abc", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[], next_page_token="def", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[policytagmanager.PolicyTag(),], next_page_token="ghi", + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_policy_tags(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_policy_tag( + transport: str = "grpc", request_type=policytagmanager.GetPolicyTagRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag( + name="name_value", + display_name="display_name_value", + description="description_value", + parent_policy_tag="parent_policy_tag_value", + child_policy_tags=["child_policy_tags_value"], + ) + response = client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetPolicyTagRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent_policy_tag == "parent_policy_tag_value" + assert response.child_policy_tags == ["child_policy_tags_value"] + + +def test_get_policy_tag_from_dict(): + test_get_policy_tag(request_type=dict) + + +def test_get_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + client.get_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetPolicyTagRequest() + + +@pytest.mark.asyncio +async def test_get_policy_tag_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.GetPolicyTagRequest +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag( + name="name_value", + display_name="display_name_value", + description="description_value", + parent_policy_tag="parent_policy_tag_value", + child_policy_tags=["child_policy_tags_value"], + ) + ) + response = await client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetPolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent_policy_tag == "parent_policy_tag_value" + assert response.child_policy_tags == ["child_policy_tags_value"] + + +@pytest.mark.asyncio +async def test_get_policy_tag_async_from_dict(): + await test_get_policy_tag_async(request_type=dict) + + +def test_get_policy_tag_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetPolicyTagRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + call.return_value = policytagmanager.PolicyTag() + client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetPolicyTagRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag() + ) + await client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_policy_tag_flattened(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_policy_tag(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_get_policy_tag_flattened_error(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_policy_tag( + policytagmanager.GetPolicyTagRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanager.PolicyTag() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_policy_tag(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_policy_tag( + policytagmanager.GetPolicyTagRequest(), name="name_value", + ) + + +def test_get_iam_policy( + transport: str = "grpc", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_from_dict(): + test_get_iam_policy(request_type=dict) + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict_foreign(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_set_iam_policy( + transport: str = "grpc", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_from_dict(): + test_set_iam_policy(request_type=dict) + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_set_iam_policy_from_dict_foreign(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_test_iam_permissions( + transport: str = "grpc", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_from_dict(): + test_test_iam_permissions(request_type=dict) + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict_foreign(): + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PolicyTagManagerClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PolicyTagManagerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.PolicyTagManagerGrpcTransport,) + + +def test_policy_tag_manager_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PolicyTagManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_policy_tag_manager_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PolicyTagManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_taxonomy", + "delete_taxonomy", + "update_taxonomy", + "list_taxonomies", + "get_taxonomy", + "create_policy_tag", + "delete_policy_tag", + "update_policy_tag", + "list_policy_tags", + "get_policy_tag", + "get_iam_policy", + "set_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_policy_tag_manager_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_policy_tag_manager_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + 
"credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_policy_tag_manager_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_policy_tag_manager_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PolicyTagManagerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_policy_tag_manager_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PolicyTagManagerClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_policy_tag_manager_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_policy_tag_manager_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerGrpcTransport, grpc_helpers), + (transports.PolicyTagManagerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_policy_tag_manager_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerGrpcTransport, grpc_helpers), + (transports.PolicyTagManagerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_policy_tag_manager_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC 
credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerGrpcTransport, grpc_helpers), + (transports.PolicyTagManagerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_policy_tag_manager_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_policy_tag_manager_host_no_port(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datacatalog.googleapis.com" + ), + ) + assert client.transport._host == "datacatalog.googleapis.com:443" + + +def test_policy_tag_manager_host_with_port(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datacatalog.googleapis.com:8000" + ), + ) + assert client.transport._host == "datacatalog.googleapis.com:8000" + + +def test_policy_tag_manager_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PolicyTagManagerGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_policy_tag_manager_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.PolicyTagManagerGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == 
mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_policy_tag_path(): + project = "squid" + location = "clam" + taxonomy = "whelk" + policy_tag = "octopus" + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format( + project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, + ) + actual = PolicyTagManagerClient.policy_tag_path( + project, location, taxonomy, policy_tag + ) + assert expected == actual + + +def test_parse_policy_tag_path(): + expected = { + "project": "oyster", + 
"location": "nudibranch", + "taxonomy": "cuttlefish", + "policy_tag": "mussel", + } + path = PolicyTagManagerClient.policy_tag_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_policy_tag_path(path) + assert expected == actual + + +def test_taxonomy_path(): + project = "winkle" + location = "nautilus" + taxonomy = "scallop" + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( + project=project, location=location, taxonomy=taxonomy, + ) + actual = PolicyTagManagerClient.taxonomy_path(project, location, taxonomy) + assert expected == actual + + +def test_parse_taxonomy_path(): + expected = { + "project": "abalone", + "location": "squid", + "taxonomy": "clam", + } + path = PolicyTagManagerClient.taxonomy_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_taxonomy_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PolicyTagManagerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = PolicyTagManagerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder,) + actual = PolicyTagManagerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = PolicyTagManagerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization,) + actual = PolicyTagManagerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = PolicyTagManagerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project,) + actual = PolicyTagManagerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = PolicyTagManagerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = PolicyTagManagerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = PolicyTagManagerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PolicyTagManagerTransport, "_prep_wrapped_messages" + ) as prep: + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PolicyTagManagerTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PolicyTagManagerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py b/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py new file mode 100644 index 00000000..6adf5953 --- /dev/null +++ b/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py @@ -0,0 +1,1493 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization import ( + PolicyTagManagerSerializationAsyncClient, +) +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization import ( + PolicyTagManagerSerializationClient, +) +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization import ( + transports, +) +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.base import ( + _GOOGLE_AUTH_VERSION, +) +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import policytagmanagerserialization +from google.oauth2 import service_account +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(None) is None + assert ( + PolicyTagManagerSerializationClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + PolicyTagManagerSerializationClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + PolicyTagManagerSerializationClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PolicyTagManagerSerializationClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + PolicyTagManagerSerializationClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", + [PolicyTagManagerSerializationClient, PolicyTagManagerSerializationAsyncClient,], +) +def test_policy_tag_manager_serialization_client_from_service_account_info( + client_class, +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "datacatalog.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", + [PolicyTagManagerSerializationClient, PolicyTagManagerSerializationAsyncClient,], +) +def 
test_policy_tag_manager_serialization_client_from_service_account_file( + client_class, +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "datacatalog.googleapis.com:443" + + +def test_policy_tag_manager_serialization_client_get_transport_class(): + transport = PolicyTagManagerSerializationClient.get_transport_class() + available_transports = [ + transports.PolicyTagManagerSerializationGrpcTransport, + ] + assert transport in available_transports + + transport = PolicyTagManagerSerializationClient.get_transport_class("grpc") + assert transport == transports.PolicyTagManagerSerializationGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + PolicyTagManagerSerializationClient, + transports.PolicyTagManagerSerializationGrpcTransport, + "grpc", + ), + ( + PolicyTagManagerSerializationAsyncClient, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + PolicyTagManagerSerializationClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerSerializationClient), +) +@mock.patch.object( + PolicyTagManagerSerializationAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerSerializationAsyncClient), +) +def test_policy_tag_manager_serialization_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object( + PolicyTagManagerSerializationClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + PolicyTagManagerSerializationClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PolicyTagManagerSerializationClient, + transports.PolicyTagManagerSerializationGrpcTransport, + "grpc", + "true", + ), + ( + PolicyTagManagerSerializationAsyncClient, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PolicyTagManagerSerializationClient, + transports.PolicyTagManagerSerializationGrpcTransport, + "grpc", + "false", + ), + ( + 
PolicyTagManagerSerializationAsyncClient, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + PolicyTagManagerSerializationClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerSerializationClient), +) +@mock.patch.object( + PolicyTagManagerSerializationAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerSerializationAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_policy_tag_manager_serialization_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + PolicyTagManagerSerializationClient, + transports.PolicyTagManagerSerializationGrpcTransport, + "grpc", + ), + ( + PolicyTagManagerSerializationAsyncClient, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_policy_tag_manager_serialization_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + PolicyTagManagerSerializationClient, + transports.PolicyTagManagerSerializationGrpcTransport, + "grpc", + ), + ( + PolicyTagManagerSerializationAsyncClient, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_policy_tag_manager_serialization_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_policy_tag_manager_serialization_client_client_options_from_dict(): + with mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PolicyTagManagerSerializationClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + 
client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_import_taxonomies( + transport: str = "grpc", + request_type=policytagmanagerserialization.ImportTaxonomiesRequest, +): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse() + response = client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) + + +def test_import_taxonomies_from_dict(): + test_import_taxonomies(request_type=dict) + + +def test_import_taxonomies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_taxonomies), "__call__" + ) as call: + client.import_taxonomies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() + + +@pytest.mark.asyncio +async def test_import_taxonomies_async( + transport: str = "grpc_asyncio", + request_type=policytagmanagerserialization.ImportTaxonomiesRequest, +): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanagerserialization.ImportTaxonomiesResponse() + ) + response = await client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) + + +@pytest.mark.asyncio +async def test_import_taxonomies_async_from_dict(): + await test_import_taxonomies_async(request_type=dict) + + +def test_import_taxonomies_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = policytagmanagerserialization.ImportTaxonomiesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), "__call__" + ) as call: + call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse() + client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_taxonomies_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ImportTaxonomiesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanagerserialization.ImportTaxonomiesResponse() + ) + await client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_export_taxonomies( + transport: str = "grpc", + request_type=policytagmanagerserialization.ExportTaxonomiesRequest, +): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse() + response = client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) + + +def test_export_taxonomies_from_dict(): + test_export_taxonomies(request_type=dict) + + +def test_export_taxonomies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_taxonomies), "__call__" + ) as call: + client.export_taxonomies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() + + +@pytest.mark.asyncio +async def test_export_taxonomies_async( + transport: str = "grpc_asyncio", + request_type=policytagmanagerserialization.ExportTaxonomiesRequest, +): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanagerserialization.ExportTaxonomiesResponse() + ) + response = await client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) + + +@pytest.mark.asyncio +async def test_export_taxonomies_async_from_dict(): + await test_export_taxonomies_async(request_type=dict) + + +def test_export_taxonomies_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = policytagmanagerserialization.ExportTaxonomiesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), "__call__" + ) as call: + call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse() + client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_taxonomies_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ExportTaxonomiesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policytagmanagerserialization.ExportTaxonomiesResponse() + ) + await client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PolicyTagManagerSerializationClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, transports.PolicyTagManagerSerializationGrpcTransport, + ) + + +def test_policy_tag_manager_serialization_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PolicyTagManagerSerializationTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_policy_tag_manager_serialization_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PolicyTagManagerSerializationTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "import_taxonomies", + "export_taxonomies", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_policy_tag_manager_serialization_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerSerializationTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_policy_tag_manager_serialization_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages" + ) as Transport: + 
Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerSerializationTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_policy_tag_manager_serialization_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerSerializationTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_policy_tag_manager_serialization_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PolicyTagManagerSerializationClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_policy_tag_manager_serialization_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PolicyTagManagerSerializationClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_policy_tag_manager_serialization_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_policy_tag_manager_serialization_transport_auth_adc_old_google_auth( + transport_class, +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerSerializationGrpcTransport, grpc_helpers), + ( + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +@requires_api_core_gte_1_26_0 +def test_policy_tag_manager_serialization_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerSerializationGrpcTransport, grpc_helpers), + ( + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +@requires_api_core_lt_1_26_0 +def test_policy_tag_manager_serialization_transport_create_channel_old_api_core( + transport_class, 
grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerSerializationGrpcTransport, grpc_helpers), + ( + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +@requires_api_core_lt_1_26_0 +def test_policy_tag_manager_serialization_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_serialization_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_policy_tag_manager_serialization_host_no_port(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datacatalog.googleapis.com" + ), + ) + assert client.transport._host == "datacatalog.googleapis.com:443" + + +def test_policy_tag_manager_serialization_host_with_port(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datacatalog.googleapis.com:8000" + ), + ) + assert client.transport._host == "datacatalog.googleapis.com:8000" + + +def test_policy_tag_manager_serialization_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_serialization_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == 
mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_serialization_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_taxonomy_path(): + project = "squid" + location = "clam" + taxonomy = "whelk" + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( + project=project, location=location, taxonomy=taxonomy, + ) + actual = PolicyTagManagerSerializationClient.taxonomy_path( + project, location, taxonomy + ) + assert expected == actual + + +def test_parse_taxonomy_path(): + expected 
= { + "project": "octopus", + "location": "oyster", + "taxonomy": "nudibranch", + } + path = PolicyTagManagerSerializationClient.taxonomy_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_taxonomy_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PolicyTagManagerSerializationClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = PolicyTagManagerSerializationClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder,) + actual = PolicyTagManagerSerializationClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = PolicyTagManagerSerializationClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization,) + actual = PolicyTagManagerSerializationClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = PolicyTagManagerSerializationClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerSerializationClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project,) + actual = PolicyTagManagerSerializationClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = PolicyTagManagerSerializationClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = PolicyTagManagerSerializationClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = PolicyTagManagerSerializationClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerSerializationClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PolicyTagManagerSerializationTransport, "_prep_wrapped_messages" + ) as prep: + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PolicyTagManagerSerializationTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PolicyTagManagerSerializationClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/datacatalog_v1beta1/__init__.py b/tests/unit/gapic/datacatalog_v1beta1/__init__.py index 42ffdf2b..4de65971 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/__init__.py +++ b/tests/unit/gapic/datacatalog_v1beta1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py index 1d7aeb41..00e75980 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py +++ b/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,13 +23,13 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.datacatalog_v1beta1.services.data_catalog import ( DataCatalogAsyncClient, @@ -38,6 +37,12 @@ from google.cloud.datacatalog_v1beta1.services.data_catalog import DataCatalogClient from google.cloud.datacatalog_v1beta1.services.data_catalog import pagers from google.cloud.datacatalog_v1beta1.services.data_catalog import transports +from google.cloud.datacatalog_v1beta1.services.data_catalog.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.datacatalog_v1beta1.services.data_catalog.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.datacatalog_v1beta1.types import common from google.cloud.datacatalog_v1beta1.types import datacatalog from google.cloud.datacatalog_v1beta1.types import gcs_fileset_spec @@ -46,13 +51,37 @@ from google.cloud.datacatalog_v1beta1.types import table_spec from google.cloud.datacatalog_v1beta1.types import tags from google.cloud.datacatalog_v1beta1.types import timestamps -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import options_pb2 as options # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.oauth2 import 
service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.type import expr_pb2 as expr # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -98,7 +127,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class", [DataCatalogClient, DataCatalogAsyncClient,]) def test_data_catalog_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -113,7 +142,7 @@ def test_data_catalog_client_from_service_account_info(client_class): @pytest.mark.parametrize("client_class", [DataCatalogClient, 
DataCatalogAsyncClient,]) def test_data_catalog_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -164,7 +193,7 @@ def test_data_catalog_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(DataCatalogClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -448,7 +477,7 @@ def test_search_catalog( transport: str = "grpc", request_type=datacatalog.SearchCatalogRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -461,19 +490,15 @@ def test_search_catalog( call.return_value = datacatalog.SearchCatalogResponse( next_page_token="next_page_token_value", ) - response = client.search_catalog(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.SearchCatalogRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchCatalogPager) - assert response.next_page_token == "next_page_token_value" @@ -485,7 +510,7 @@ def test_search_catalog_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -493,7 +518,6 @@ def test_search_catalog_empty_call(): client.search_catalog() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.SearchCatalogRequest() @@ -502,7 +526,7 @@ async def test_search_catalog_async( transport: str = "grpc_asyncio", request_type=datacatalog.SearchCatalogRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -515,18 +539,15 @@ async def test_search_catalog_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.SearchCatalogResponse(next_page_token="next_page_token_value",) ) - response = await client.search_catalog(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.SearchCatalogRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchCatalogAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -536,13 +557,12 @@ async def test_search_catalog_async_from_dict(): def test_search_catalog_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = datacatalog.SearchCatalogResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.search_catalog( @@ -556,16 +576,14 @@ def test_search_catalog_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].scope == datacatalog.SearchCatalogRequest.Scope( include_org_ids=["include_org_ids_value"] ) - assert args[0].query == "query_value" def test_search_catalog_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -581,7 +599,7 @@ def test_search_catalog_flattened_error(): @pytest.mark.asyncio async def test_search_catalog_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: @@ -604,17 +622,15 @@ async def test_search_catalog_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].scope == datacatalog.SearchCatalogRequest.Scope( include_org_ids=["include_org_ids_value"] ) - assert args[0].query == "query_value" @pytest.mark.asyncio async def test_search_catalog_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -629,7 +645,7 @@ async def test_search_catalog_flattened_error_async(): def test_search_catalog_pager(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: @@ -664,7 +680,7 @@ def test_search_catalog_pager(): def test_search_catalog_pages(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: @@ -694,7 +710,7 @@ def test_search_catalog_pages(): @pytest.mark.asyncio async def test_search_catalog_async_pager(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -731,7 +747,7 @@ async def test_search_catalog_async_pager(): @pytest.mark.asyncio async def test_search_catalog_async_pages(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -767,7 +783,7 @@ def test_create_entry_group( transport: str = "grpc", request_type=datacatalog.CreateEntryGroupRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -784,23 +800,17 @@ def test_create_entry_group( display_name="display_name_value", description="description_value", ) - response = client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryGroupRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -812,7 +822,7 @@ def test_create_entry_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -822,7 +832,6 @@ def test_create_entry_group_empty_call(): client.create_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryGroupRequest() @@ -831,7 +840,7 @@ async def test_create_entry_group_async( transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryGroupRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -850,22 +859,17 @@ async def test_create_entry_group_async( description="description_value", ) ) - response = await client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryGroupRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.EntryGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -875,11 +879,12 @@ async def test_create_entry_group_async_from_dict(): def test_create_entry_group_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateEntryGroupRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -887,7 +892,6 @@ def test_create_entry_group_field_headers(): type(client.transport.create_entry_group), "__call__" ) as call: call.return_value = datacatalog.EntryGroup() - client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. @@ -902,11 +906,12 @@ def test_create_entry_group_field_headers(): @pytest.mark.asyncio async def test_create_entry_group_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateEntryGroupRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -916,7 +921,6 @@ async def test_create_entry_group_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() ) - await client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. @@ -930,7 +934,7 @@ async def test_create_entry_group_field_headers_async(): def test_create_entry_group_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -938,7 +942,6 @@ def test_create_entry_group_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_entry_group( @@ -951,16 +954,13 @@ def test_create_entry_group_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].entry_group_id == "entry_group_id_value" - assert args[0].entry_group == datacatalog.EntryGroup(name="name_value") def test_create_entry_group_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -975,7 +975,7 @@ def test_create_entry_group_flattened_error(): @pytest.mark.asyncio async def test_create_entry_group_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -999,17 +999,14 @@ async def test_create_entry_group_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].entry_group_id == "entry_group_id_value" - assert args[0].entry_group == datacatalog.EntryGroup(name="name_value") @pytest.mark.asyncio async def test_create_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1026,7 +1023,7 @@ def test_update_entry_group( transport: str = "grpc", request_type=datacatalog.UpdateEntryGroupRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1043,23 +1040,17 @@ def test_update_entry_group( display_name="display_name_value", description="description_value", ) - response = client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryGroupRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1071,7 +1062,7 @@ def test_update_entry_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1081,7 +1072,6 @@ def test_update_entry_group_empty_call(): client.update_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryGroupRequest() @@ -1090,7 +1080,7 @@ async def test_update_entry_group_async( transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryGroupRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1109,22 +1099,17 @@ async def test_update_entry_group_async( description="description_value", ) ) - response = await client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryGroupRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.EntryGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1134,11 +1119,12 @@ async def test_update_entry_group_async_from_dict(): def test_update_entry_group_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateEntryGroupRequest() + request.entry_group.name = "entry_group.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1146,7 +1132,6 @@ def test_update_entry_group_field_headers(): type(client.transport.update_entry_group), "__call__" ) as call: call.return_value = datacatalog.EntryGroup() - client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. @@ -1163,11 +1148,12 @@ def test_update_entry_group_field_headers(): @pytest.mark.asyncio async def test_update_entry_group_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateEntryGroupRequest() + request.entry_group.name = "entry_group.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1177,7 +1163,6 @@ async def test_update_entry_group_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() ) - await client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. @@ -1193,7 +1178,7 @@ async def test_update_entry_group_field_headers_async(): def test_update_entry_group_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1201,26 +1186,23 @@ def test_update_entry_group_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_entry_group( entry_group=datacatalog.EntryGroup(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].entry_group == datacatalog.EntryGroup(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_entry_group_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1228,13 +1210,13 @@ def test_update_entry_group_flattened_error(): client.update_entry_group( datacatalog.UpdateEntryGroupRequest(), entry_group=datacatalog.EntryGroup(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_entry_group_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1250,22 +1232,20 @@ async def test_update_entry_group_flattened_async(): # using the keyword arguments to the method. response = await client.update_entry_group( entry_group=datacatalog.EntryGroup(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].entry_group == datacatalog.EntryGroup(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1273,7 +1253,7 @@ async def test_update_entry_group_flattened_error_async(): await client.update_entry_group( datacatalog.UpdateEntryGroupRequest(), entry_group=datacatalog.EntryGroup(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1281,7 +1261,7 @@ def test_get_entry_group( transport: str = "grpc", request_type=datacatalog.GetEntryGroupRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1296,23 +1276,17 @@ def test_get_entry_group( display_name="display_name_value", description="description_value", ) - response = client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryGroupRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, datacatalog.EntryGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1324,7 +1298,7 @@ def test_get_entry_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1332,7 +1306,6 @@ def test_get_entry_group_empty_call(): client.get_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryGroupRequest() @@ -1341,7 +1314,7 @@ async def test_get_entry_group_async( transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryGroupRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1358,22 +1331,17 @@ async def test_get_entry_group_async( description="description_value", ) ) - response = await client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryGroupRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, datacatalog.EntryGroup) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1383,17 +1351,17 @@ async def test_get_entry_group_async_from_dict(): def test_get_entry_group_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.GetEntryGroupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: call.return_value = datacatalog.EntryGroup() - client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. @@ -1408,11 +1376,12 @@ def test_get_entry_group_field_headers(): @pytest.mark.asyncio async def test_get_entry_group_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.GetEntryGroupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1420,7 +1389,6 @@ async def test_get_entry_group_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() ) - await client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. 
@@ -1434,31 +1402,29 @@ async def test_get_entry_group_field_headers_async(): def test_get_entry_group_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_entry_group( - name="name_value", read_mask=field_mask.FieldMask(paths=["paths_value"]), + name="name_value", + read_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - assert args[0].read_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].read_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_get_entry_group_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1466,13 +1432,13 @@ def test_get_entry_group_flattened_error(): client.get_entry_group( datacatalog.GetEntryGroupRequest(), name="name_value", - read_mask=field_mask.FieldMask(paths=["paths_value"]), + read_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_get_entry_group_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: @@ -1485,22 +1451,21 @@ async def test_get_entry_group_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_entry_group( - name="name_value", read_mask=field_mask.FieldMask(paths=["paths_value"]), + name="name_value", + read_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - - assert args[0].read_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].read_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_get_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1508,7 +1473,7 @@ async def test_get_entry_group_flattened_error_async(): await client.get_entry_group( datacatalog.GetEntryGroupRequest(), name="name_value", - read_mask=field_mask.FieldMask(paths=["paths_value"]), + read_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1516,7 +1481,7 @@ def test_delete_entry_group( transport: str = "grpc", request_type=datacatalog.DeleteEntryGroupRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1529,13 +1494,11 @@ def test_delete_entry_group( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryGroupRequest() # Establish that the response is the type that we expect. @@ -1550,7 +1513,7 @@ def test_delete_entry_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1560,7 +1523,6 @@ def test_delete_entry_group_empty_call(): client.delete_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryGroupRequest() @@ -1569,7 +1531,7 @@ async def test_delete_entry_group_async( transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryGroupRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1582,13 +1544,11 @@ async def test_delete_entry_group_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryGroupRequest() # Establish that the response is the type that we expect. @@ -1601,11 +1561,12 @@ async def test_delete_entry_group_async_from_dict(): def test_delete_entry_group_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteEntryGroupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1613,7 +1574,6 @@ def test_delete_entry_group_field_headers(): type(client.transport.delete_entry_group), "__call__" ) as call: call.return_value = None - client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. 
@@ -1628,11 +1588,12 @@ def test_delete_entry_group_field_headers(): @pytest.mark.asyncio async def test_delete_entry_group_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteEntryGroupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1640,7 +1601,6 @@ async def test_delete_entry_group_field_headers_async(): type(client.transport.delete_entry_group), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. @@ -1654,7 +1614,7 @@ async def test_delete_entry_group_field_headers_async(): def test_delete_entry_group_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1662,7 +1622,6 @@ def test_delete_entry_group_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_entry_group(name="name_value",) @@ -1671,12 +1630,11 @@ def test_delete_entry_group_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_entry_group_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1688,7 +1646,7 @@ def test_delete_entry_group_flattened_error(): @pytest.mark.asyncio async def test_delete_entry_group_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1706,13 +1664,12 @@ async def test_delete_entry_group_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1726,7 +1683,7 @@ def test_list_entry_groups( transport: str = "grpc", request_type=datacatalog.ListEntryGroupsRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1741,19 +1698,15 @@ def test_list_entry_groups( call.return_value = datacatalog.ListEntryGroupsResponse( next_page_token="next_page_token_value", ) - response = client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntryGroupsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntryGroupsPager) - assert response.next_page_token == "next_page_token_value" @@ -1765,7 +1718,7 @@ def test_list_entry_groups_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1775,7 +1728,6 @@ def test_list_entry_groups_empty_call(): client.list_entry_groups() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntryGroupsRequest() @@ -1784,7 +1736,7 @@ async def test_list_entry_groups_async( transport: str = "grpc_asyncio", request_type=datacatalog.ListEntryGroupsRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1801,18 +1753,15 @@ async def test_list_entry_groups_async( next_page_token="next_page_token_value", ) ) - response = await client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntryGroupsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListEntryGroupsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1822,11 +1771,12 @@ async def test_list_entry_groups_async_from_dict(): def test_list_entry_groups_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListEntryGroupsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1834,7 +1784,6 @@ def test_list_entry_groups_field_headers(): type(client.transport.list_entry_groups), "__call__" ) as call: call.return_value = datacatalog.ListEntryGroupsResponse() - client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. @@ -1849,11 +1798,12 @@ def test_list_entry_groups_field_headers(): @pytest.mark.asyncio async def test_list_entry_groups_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListEntryGroupsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1863,7 +1813,6 @@ async def test_list_entry_groups_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntryGroupsResponse() ) - await client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. 
@@ -1877,7 +1826,7 @@ async def test_list_entry_groups_field_headers_async(): def test_list_entry_groups_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1885,7 +1834,6 @@ def test_list_entry_groups_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntryGroupsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_entry_groups(parent="parent_value",) @@ -1894,12 +1842,11 @@ def test_list_entry_groups_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_entry_groups_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1911,7 +1858,7 @@ def test_list_entry_groups_flattened_error(): @pytest.mark.asyncio async def test_list_entry_groups_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1931,13 +1878,12 @@ async def test_list_entry_groups_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_entry_groups_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1948,7 +1894,7 @@ async def test_list_entry_groups_flattened_error_async(): def test_list_entry_groups_pager(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1990,7 +1936,7 @@ def test_list_entry_groups_pager(): def test_list_entry_groups_pages(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2024,7 +1970,7 @@ def test_list_entry_groups_pages(): @pytest.mark.asyncio async def test_list_entry_groups_async_pager(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2065,7 +2011,7 @@ async def test_list_entry_groups_async_pager(): @pytest.mark.asyncio async def test_list_entry_groups_async_pages(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2105,7 +2051,7 @@ def test_create_entry( transport: str = "grpc", request_type=datacatalog.CreateEntryRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2126,25 +2072,18 @@ def test_create_entry( file_patterns=["file_patterns_value"] ), ) - response = client.create_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2156,7 +2095,7 @@ def test_create_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2164,7 +2103,6 @@ def test_create_entry_empty_call(): client.create_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryRequest() @@ -2173,7 +2111,7 @@ async def test_create_entry_async( transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2191,24 +2129,18 @@ async def test_create_entry_async( description="description_value", ) ) - response = await client.create_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2218,17 +2150,17 @@ async def test_create_entry_async_from_dict(): def test_create_entry_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateEntryRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_entry), "__call__") as call: call.return_value = datacatalog.Entry() - client.create_entry(request) # Establish that the underlying gRPC stub method was called. 
@@ -2243,17 +2175,17 @@ def test_create_entry_field_headers(): @pytest.mark.asyncio async def test_create_entry_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateEntryRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - await client.create_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2267,13 +2199,12 @@ async def test_create_entry_field_headers_async(): def test_create_entry_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_entry( @@ -2286,16 +2217,13 @@ def test_create_entry_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].entry_id == "entry_id_value" - assert args[0].entry == datacatalog.Entry(name="name_value") def test_create_entry_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2310,7 +2238,7 @@ def test_create_entry_flattened_error(): @pytest.mark.asyncio async def test_create_entry_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_entry), "__call__") as call: @@ -2330,17 +2258,14 @@ async def test_create_entry_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].entry_id == "entry_id_value" - assert args[0].entry == datacatalog.Entry(name="name_value") @pytest.mark.asyncio async def test_create_entry_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2357,7 +2282,7 @@ def test_update_entry( transport: str = "grpc", request_type=datacatalog.UpdateEntryRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2378,25 +2303,18 @@ def test_update_entry( file_patterns=["file_patterns_value"] ), ) - response = client.update_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2408,7 +2326,7 @@ def test_update_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2416,7 +2334,6 @@ def test_update_entry_empty_call(): client.update_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryRequest() @@ -2425,7 +2342,7 @@ async def test_update_entry_async( transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2443,24 +2360,18 @@ async def test_update_entry_async( description="description_value", ) ) - response = await client.update_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2470,17 +2381,17 @@ async def test_update_entry_async_from_dict(): def test_update_entry_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateEntryRequest() + request.entry.name = "entry.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entry), "__call__") as call: call.return_value = datacatalog.Entry() - client.update_entry(request) # Establish that the underlying gRPC stub method was called. 
@@ -2495,17 +2406,17 @@ def test_update_entry_field_headers(): @pytest.mark.asyncio async def test_update_entry_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateEntryRequest() + request.entry.name = "entry.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - await client.update_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2519,32 +2430,29 @@ async def test_update_entry_field_headers_async(): def test_update_entry_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_entry( entry=datacatalog.Entry(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].entry == datacatalog.Entry(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_entry_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2552,13 +2460,13 @@ def test_update_entry_flattened_error(): client.update_entry( datacatalog.UpdateEntryRequest(), entry=datacatalog.Entry(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_entry_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entry), "__call__") as call: @@ -2570,22 +2478,20 @@ async def test_update_entry_flattened_async(): # using the keyword arguments to the method. response = await client.update_entry( entry=datacatalog.Entry(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].entry == datacatalog.Entry(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_entry_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2593,7 +2499,7 @@ async def test_update_entry_flattened_error_async(): await client.update_entry( datacatalog.UpdateEntryRequest(), entry=datacatalog.Entry(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -2601,7 +2507,7 @@ def test_delete_entry( transport: str = "grpc", request_type=datacatalog.DeleteEntryRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2612,13 +2518,11 @@ def test_delete_entry( with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryRequest() # Establish that the response is the type that we expect. @@ -2633,7 +2537,7 @@ def test_delete_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2641,7 +2545,6 @@ def test_delete_entry_empty_call(): client.delete_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryRequest() @@ -2650,7 +2553,7 @@ async def test_delete_entry_async( transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2661,13 +2564,11 @@ async def test_delete_entry_async( with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryRequest() # Establish that the response is the type that we expect. @@ -2680,17 +2581,17 @@ async def test_delete_entry_async_from_dict(): def test_delete_entry_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteEntryRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: call.return_value = None - client.delete_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2705,17 +2606,17 @@ def test_delete_entry_field_headers(): @pytest.mark.asyncio async def test_delete_entry_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteEntryRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2729,13 +2630,12 @@ async def test_delete_entry_field_headers_async(): def test_delete_entry_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_entry(name="name_value",) @@ -2744,12 +2644,11 @@ def test_delete_entry_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_entry_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2761,7 +2660,7 @@ def test_delete_entry_flattened_error(): @pytest.mark.asyncio async def test_delete_entry_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: @@ -2777,13 +2676,12 @@ async def test_delete_entry_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_entry_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2795,7 +2693,7 @@ async def test_delete_entry_flattened_error_async(): def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryRequest): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2816,25 +2714,18 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq file_patterns=["file_patterns_value"] ), ) - response = client.get_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2846,7 +2737,7 @@ def test_get_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2854,7 +2745,6 @@ def test_get_entry_empty_call(): client.get_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryRequest() @@ -2863,7 +2753,7 @@ async def test_get_entry_async( transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2881,24 +2771,18 @@ async def test_get_entry_async( description="description_value", ) ) - response = await client.get_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2908,17 +2792,17 @@ async def test_get_entry_async_from_dict(): def test_get_entry_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.GetEntryRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: call.return_value = datacatalog.Entry() - client.get_entry(request) # Establish that the underlying gRPC stub method was called. 
@@ -2933,17 +2817,17 @@ def test_get_entry_field_headers(): @pytest.mark.asyncio async def test_get_entry_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.GetEntryRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - await client.get_entry(request) # Establish that the underlying gRPC stub method was called. @@ -2957,13 +2841,12 @@ async def test_get_entry_field_headers_async(): def test_get_entry_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_entry(name="name_value",) @@ -2972,12 +2855,11 @@ def test_get_entry_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_entry_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2989,7 +2871,7 @@ def test_get_entry_flattened_error(): @pytest.mark.asyncio async def test_get_entry_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: @@ -3005,13 +2887,12 @@ async def test_get_entry_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_entry_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3025,7 +2906,7 @@ def test_lookup_entry( transport: str = "grpc", request_type=datacatalog.LookupEntryRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3046,25 +2927,18 @@ def test_lookup_entry( file_patterns=["file_patterns_value"] ), ) - response = client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.LookupEntryRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -3076,7 +2950,7 @@ def test_lookup_entry_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3084,7 +2958,6 @@ def test_lookup_entry_empty_call(): client.lookup_entry() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.LookupEntryRequest() @@ -3093,7 +2966,7 @@ async def test_lookup_entry_async( transport: str = "grpc_asyncio", request_type=datacatalog.LookupEntryRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3111,24 +2984,18 @@ async def test_lookup_entry_async( description="description_value", ) ) - response = await client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.LookupEntryRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, datacatalog.Entry) - assert response.name == "name_value" - assert response.linked_resource == "linked_resource_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -3141,7 +3008,7 @@ def test_list_entries( transport: str = "grpc", request_type=datacatalog.ListEntriesRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3154,19 +3021,15 @@ def test_list_entries( call.return_value = datacatalog.ListEntriesResponse( next_page_token="next_page_token_value", ) - response = client.list_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntriesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntriesPager) - assert response.next_page_token == "next_page_token_value" @@ -3178,7 +3041,7 @@ def test_list_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3186,7 +3049,6 @@ def test_list_entries_empty_call(): client.list_entries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntriesRequest() @@ -3195,7 +3057,7 @@ async def test_list_entries_async( transport: str = "grpc_asyncio", request_type=datacatalog.ListEntriesRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3208,18 +3070,15 @@ async def test_list_entries_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntriesResponse(next_page_token="next_page_token_value",) ) - response = await client.list_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntriesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListEntriesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -3229,17 +3088,17 @@ async def test_list_entries_async_from_dict(): def test_list_entries_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListEntriesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: call.return_value = datacatalog.ListEntriesResponse() - client.list_entries(request) # Establish that the underlying gRPC stub method was called. 
@@ -3254,11 +3113,12 @@ def test_list_entries_field_headers(): @pytest.mark.asyncio async def test_list_entries_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListEntriesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3266,7 +3126,6 @@ async def test_list_entries_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntriesResponse() ) - await client.list_entries(request) # Establish that the underlying gRPC stub method was called. @@ -3280,13 +3139,12 @@ async def test_list_entries_field_headers_async(): def test_list_entries_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntriesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_entries(parent="parent_value",) @@ -3295,12 +3153,11 @@ def test_list_entries_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_entries_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3312,7 +3169,7 @@ def test_list_entries_flattened_error(): @pytest.mark.asyncio async def test_list_entries_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: @@ -3330,13 +3187,12 @@ async def test_list_entries_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_entries_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3347,7 +3203,7 @@ async def test_list_entries_flattened_error_async(): def test_list_entries_pager(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: @@ -3385,7 +3241,7 @@ def test_list_entries_pager(): def test_list_entries_pages(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_entries), "__call__") as call: @@ -3415,7 +3271,7 @@ def test_list_entries_pages(): @pytest.mark.asyncio async def test_list_entries_async_pager(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3452,7 +3308,7 @@ async def test_list_entries_async_pager(): @pytest.mark.asyncio async def test_list_entries_async_pages(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3488,7 +3344,7 @@ def test_create_tag_template( transport: str = "grpc", request_type=datacatalog.CreateTagTemplateRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3503,21 +3359,16 @@ def test_create_tag_template( call.return_value = tags.TagTemplate( name="name_value", display_name="display_name_value", ) - response = client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" @@ -3529,7 +3380,7 @@ def test_create_tag_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3539,7 +3390,6 @@ def test_create_tag_template_empty_call(): client.create_tag_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateRequest() @@ -3548,7 +3398,7 @@ async def test_create_tag_template_async( transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagTemplateRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3563,20 +3413,16 @@ async def test_create_tag_template_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplate(name="name_value", display_name="display_name_value",) ) - response = await client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" @@ -3586,11 +3432,12 @@ async def test_create_tag_template_async_from_dict(): def test_create_tag_template_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = datacatalog.CreateTagTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3598,7 +3445,6 @@ def test_create_tag_template_field_headers(): type(client.transport.create_tag_template), "__call__" ) as call: call.return_value = tags.TagTemplate() - client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. @@ -3613,11 +3459,12 @@ def test_create_tag_template_field_headers(): @pytest.mark.asyncio async def test_create_tag_template_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3625,7 +3472,6 @@ async def test_create_tag_template_field_headers_async(): type(client.transport.create_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - await client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. @@ -3639,7 +3485,7 @@ async def test_create_tag_template_field_headers_async(): def test_create_tag_template_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3647,7 +3493,6 @@ def test_create_tag_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_tag_template( @@ -3660,16 +3505,13 @@ def test_create_tag_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].tag_template_id == "tag_template_id_value" - assert args[0].tag_template == tags.TagTemplate(name="name_value") def test_create_tag_template_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3684,7 +3526,7 @@ def test_create_tag_template_flattened_error(): @pytest.mark.asyncio async def test_create_tag_template_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3706,17 +3548,14 @@ async def test_create_tag_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].tag_template_id == "tag_template_id_value" - assert args[0].tag_template == tags.TagTemplate(name="name_value") @pytest.mark.asyncio async def test_create_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3733,7 +3572,7 @@ def test_get_tag_template( transport: str = "grpc", request_type=datacatalog.GetTagTemplateRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3746,21 +3585,16 @@ def test_get_tag_template( call.return_value = tags.TagTemplate( name="name_value", display_name="display_name_value", ) - response = client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetTagTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" @@ -3772,7 +3606,7 @@ def test_get_tag_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3780,7 +3614,6 @@ def test_get_tag_template_empty_call(): client.get_tag_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetTagTemplateRequest() @@ -3789,7 +3622,7 @@ async def test_get_tag_template_async( transport: str = "grpc_asyncio", request_type=datacatalog.GetTagTemplateRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3802,20 +3635,16 @@ async def test_get_tag_template_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplate(name="name_value", display_name="display_name_value",) ) - response = await client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" @@ -3825,17 +3654,17 @@ async def test_get_tag_template_async_from_dict(): def test_get_tag_template_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.GetTagTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: call.return_value = tags.TagTemplate() - client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. 
@@ -3850,17 +3679,17 @@ def test_get_tag_template_field_headers(): @pytest.mark.asyncio async def test_get_tag_template_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.GetTagTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - await client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. @@ -3874,13 +3703,12 @@ async def test_get_tag_template_field_headers_async(): def test_get_tag_template_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_tag_template(name="name_value",) @@ -3889,12 +3717,11 @@ def test_get_tag_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_tag_template_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3906,7 +3733,7 @@ def test_get_tag_template_flattened_error(): @pytest.mark.asyncio async def test_get_tag_template_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: @@ -3922,13 +3749,12 @@ async def test_get_tag_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3942,7 +3768,7 @@ def test_update_tag_template( transport: str = "grpc", request_type=datacatalog.UpdateTagTemplateRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3957,21 +3783,16 @@ def test_update_tag_template( call.return_value = tags.TagTemplate( name="name_value", display_name="display_name_value", ) - response = client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, tags.TagTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" @@ -3983,7 +3804,7 @@ def test_update_tag_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3993,7 +3814,6 @@ def test_update_tag_template_empty_call(): client.update_tag_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateRequest() @@ -4002,7 +3822,7 @@ async def test_update_tag_template_async( transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagTemplateRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4017,20 +3837,16 @@ async def test_update_tag_template_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplate(name="name_value", display_name="display_name_value",) ) - response = await client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, tags.TagTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" @@ -4040,11 +3856,12 @@ async def test_update_tag_template_async_from_dict(): def test_update_tag_template_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagTemplateRequest() + request.tag_template.name = "tag_template.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4052,7 +3869,6 @@ def test_update_tag_template_field_headers(): type(client.transport.update_tag_template), "__call__" ) as call: call.return_value = tags.TagTemplate() - client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. @@ -4070,11 +3886,12 @@ def test_update_tag_template_field_headers(): @pytest.mark.asyncio async def test_update_tag_template_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagTemplateRequest() + request.tag_template.name = "tag_template.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4082,7 +3899,6 @@ async def test_update_tag_template_field_headers_async(): type(client.transport.update_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - await client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. 
@@ -4099,7 +3915,7 @@ async def test_update_tag_template_field_headers_async(): def test_update_tag_template_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4107,26 +3923,23 @@ def test_update_tag_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_tag_template( tag_template=tags.TagTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].tag_template == tags.TagTemplate(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_tag_template_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -4134,13 +3947,13 @@ def test_update_tag_template_flattened_error(): client.update_tag_template( datacatalog.UpdateTagTemplateRequest(), tag_template=tags.TagTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_tag_template_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4154,22 +3967,20 @@ async def test_update_tag_template_flattened_async(): # using the keyword arguments to the method. response = await client.update_tag_template( tag_template=tags.TagTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].tag_template == tags.TagTemplate(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -4177,7 +3988,7 @@ async def test_update_tag_template_flattened_error_async(): await client.update_tag_template( datacatalog.UpdateTagTemplateRequest(), tag_template=tags.TagTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -4185,7 +3996,7 @@ def test_delete_tag_template( transport: str = "grpc", request_type=datacatalog.DeleteTagTemplateRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4198,13 +4009,11 @@ def test_delete_tag_template( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateRequest() # Establish that the response is the type that we expect. @@ -4219,7 +4028,7 @@ def test_delete_tag_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4229,7 +4038,6 @@ def test_delete_tag_template_empty_call(): client.delete_tag_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateRequest() @@ -4238,7 +4046,7 @@ async def test_delete_tag_template_async( transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagTemplateRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4251,13 +4059,11 @@ async def test_delete_tag_template_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateRequest() # Establish that the response is the type that we expect. @@ -4270,11 +4076,12 @@ async def test_delete_tag_template_async_from_dict(): def test_delete_tag_template_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4282,7 +4089,6 @@ def test_delete_tag_template_field_headers(): type(client.transport.delete_tag_template), "__call__" ) as call: call.return_value = None - client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. 
@@ -4297,11 +4103,12 @@ def test_delete_tag_template_field_headers(): @pytest.mark.asyncio async def test_delete_tag_template_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4309,7 +4116,6 @@ async def test_delete_tag_template_field_headers_async(): type(client.transport.delete_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. @@ -4323,7 +4129,7 @@ async def test_delete_tag_template_field_headers_async(): def test_delete_tag_template_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4331,7 +4137,6 @@ def test_delete_tag_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_tag_template( @@ -4342,14 +4147,12 @@ def test_delete_tag_template_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].force == True def test_delete_tag_template_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4361,7 +4164,7 @@ def test_delete_tag_template_flattened_error(): @pytest.mark.asyncio async def test_delete_tag_template_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4379,15 +4182,13 @@ async def test_delete_tag_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].force == True @pytest.mark.asyncio async def test_delete_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -4401,7 +4202,7 @@ def test_create_tag_template_field( transport: str = "grpc", request_type=datacatalog.CreateTagTemplateFieldRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4419,25 +4220,18 @@ def test_create_tag_template_field( is_required=True, order=540, ) - response = client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateFieldRequest() # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_required is True - assert response.order == 540 @@ -4449,7 +4243,7 @@ def test_create_tag_template_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4459,7 +4253,6 @@ def test_create_tag_template_field_empty_call(): client.create_tag_template_field() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateFieldRequest() @@ -4469,7 +4262,7 @@ async def test_create_tag_template_field_async( request_type=datacatalog.CreateTagTemplateFieldRequest, ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4489,24 +4282,18 @@ async def test_create_tag_template_field_async( order=540, ) ) - response = await client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_required is True - assert response.order == 540 @@ -4516,11 +4303,12 @@ async def test_create_tag_template_field_async_from_dict(): def test_create_tag_template_field_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagTemplateFieldRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4528,7 +4316,6 @@ def test_create_tag_template_field_field_headers(): type(client.transport.create_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() - client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -4543,11 +4330,12 @@ def test_create_tag_template_field_field_headers(): @pytest.mark.asyncio async def test_create_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagTemplateFieldRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4557,7 +4345,6 @@ async def test_create_tag_template_field_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() ) - await client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -4571,7 +4358,7 @@ async def test_create_tag_template_field_field_headers_async(): def test_create_tag_template_field_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4579,7 +4366,6 @@ def test_create_tag_template_field_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_tag_template_field( @@ -4592,16 +4378,13 @@ def test_create_tag_template_field_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].tag_template_field_id == "tag_template_field_id_value" - assert args[0].tag_template_field == tags.TagTemplateField(name="name_value") def test_create_tag_template_field_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4616,7 +4399,7 @@ def test_create_tag_template_field_flattened_error(): @pytest.mark.asyncio async def test_create_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4640,17 +4423,14 @@ async def test_create_tag_template_field_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].tag_template_field_id == "tag_template_field_id_value" - assert args[0].tag_template_field == tags.TagTemplateField(name="name_value") @pytest.mark.asyncio async def test_create_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -4667,7 +4447,7 @@ def test_update_tag_template_field( transport: str = "grpc", request_type=datacatalog.UpdateTagTemplateFieldRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4685,25 +4465,18 @@ def test_update_tag_template_field( is_required=True, order=540, ) - response = client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_required is True - assert response.order == 540 @@ -4715,7 +4488,7 @@ def test_update_tag_template_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4725,7 +4498,6 @@ def test_update_tag_template_field_empty_call(): client.update_tag_template_field() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() @@ -4735,7 +4507,7 @@ async def test_update_tag_template_field_async( request_type=datacatalog.UpdateTagTemplateFieldRequest, ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4755,24 +4527,18 @@ async def test_update_tag_template_field_async( order=540, ) ) - response = await client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_required is True - assert response.order == 540 @@ -4782,11 +4548,12 @@ async def test_update_tag_template_field_async_from_dict(): def test_update_tag_template_field_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagTemplateFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4794,7 +4561,6 @@ def test_update_tag_template_field_field_headers(): type(client.transport.update_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() - client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -4809,11 +4575,12 @@ def test_update_tag_template_field_field_headers(): @pytest.mark.asyncio async def test_update_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagTemplateFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4823,7 +4590,6 @@ async def test_update_tag_template_field_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() ) - await client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -4837,7 +4603,7 @@ async def test_update_tag_template_field_field_headers_async(): def test_update_tag_template_field_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4845,29 +4611,25 @@ def test_update_tag_template_field_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_tag_template_field( name="name_value", tag_template_field=tags.TagTemplateField(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].tag_template_field == tags.TagTemplateField(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_tag_template_field_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4876,13 +4638,13 @@ def test_update_tag_template_field_flattened_error(): datacatalog.UpdateTagTemplateFieldRequest(), name="name_value", tag_template_field=tags.TagTemplateField(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -4899,24 +4661,21 @@ async def test_update_tag_template_field_flattened_async(): response = await client.update_tag_template_field( name="name_value", tag_template_field=tags.TagTemplateField(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].tag_template_field == tags.TagTemplateField(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -4925,7 +4684,7 @@ async def test_update_tag_template_field_flattened_error_async(): datacatalog.UpdateTagTemplateFieldRequest(), name="name_value", tag_template_field=tags.TagTemplateField(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -4933,7 +4692,7 @@ def test_rename_tag_template_field( transport: str = "grpc", request_type=datacatalog.RenameTagTemplateFieldRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4951,25 +4710,18 @@ def test_rename_tag_template_field( is_required=True, order=540, ) - response = client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldRequest() # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_required is True - assert response.order == 540 @@ -4981,7 +4733,7 @@ def test_rename_tag_template_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4991,7 +4743,6 @@ def test_rename_tag_template_field_empty_call(): client.rename_tag_template_field() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldRequest() @@ -5001,7 +4752,7 @@ async def test_rename_tag_template_field_async( request_type=datacatalog.RenameTagTemplateFieldRequest, ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5021,24 +4772,18 @@ async def test_rename_tag_template_field_async( order=540, ) ) - response = await client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_required is True - assert response.order == 540 @@ -5048,11 +4793,12 @@ async def test_rename_tag_template_field_async_from_dict(): def test_rename_tag_template_field_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.RenameTagTemplateFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5060,7 +4806,6 @@ def test_rename_tag_template_field_field_headers(): type(client.transport.rename_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() - client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -5075,11 +4820,12 @@ def test_rename_tag_template_field_field_headers(): @pytest.mark.asyncio async def test_rename_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.RenameTagTemplateFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5089,7 +4835,6 @@ async def test_rename_tag_template_field_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() ) - await client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -5103,7 +4848,7 @@ async def test_rename_tag_template_field_field_headers_async(): def test_rename_tag_template_field_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5111,7 +4856,6 @@ def test_rename_tag_template_field_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.rename_tag_template_field( @@ -5123,14 +4867,12 @@ def test_rename_tag_template_field_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].new_tag_template_field_id == "new_tag_template_field_id_value" def test_rename_tag_template_field_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5144,7 +4886,7 @@ def test_rename_tag_template_field_flattened_error(): @pytest.mark.asyncio async def test_rename_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5167,15 +4909,13 @@ async def test_rename_tag_template_field_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].new_tag_template_field_id == "new_tag_template_field_id_value" @pytest.mark.asyncio async def test_rename_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5191,7 +4931,7 @@ def test_delete_tag_template_field( transport: str = "grpc", request_type=datacatalog.DeleteTagTemplateFieldRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5204,13 +4944,11 @@ def test_delete_tag_template_field( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() # Establish that the response is the type that we expect. @@ -5225,7 +4963,7 @@ def test_delete_tag_template_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5235,7 +4973,6 @@ def test_delete_tag_template_field_empty_call(): client.delete_tag_template_field() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() @@ -5245,7 +4982,7 @@ async def test_delete_tag_template_field_async( request_type=datacatalog.DeleteTagTemplateFieldRequest, ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5258,13 +4995,11 @@ async def test_delete_tag_template_field_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() # Establish that the response is the type that we expect. @@ -5277,11 +5012,12 @@ async def test_delete_tag_template_field_async_from_dict(): def test_delete_tag_template_field_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagTemplateFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5289,7 +5025,6 @@ def test_delete_tag_template_field_field_headers(): type(client.transport.delete_tag_template_field), "__call__" ) as call: call.return_value = None - client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. 
@@ -5304,11 +5039,12 @@ def test_delete_tag_template_field_field_headers(): @pytest.mark.asyncio async def test_delete_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagTemplateFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5316,7 +5052,6 @@ async def test_delete_tag_template_field_field_headers_async(): type(client.transport.delete_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. @@ -5330,7 +5065,7 @@ async def test_delete_tag_template_field_field_headers_async(): def test_delete_tag_template_field_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5338,7 +5073,6 @@ def test_delete_tag_template_field_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_tag_template_field( @@ -5349,14 +5083,12 @@ def test_delete_tag_template_field_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].force == True def test_delete_tag_template_field_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5368,7 +5100,7 @@ def test_delete_tag_template_field_flattened_error(): @pytest.mark.asyncio async def test_delete_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5388,15 +5120,13 @@ async def test_delete_tag_template_field_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].force == True @pytest.mark.asyncio async def test_delete_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5408,7 +5138,7 @@ async def test_delete_tag_template_field_flattened_error_async(): def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagRequest): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5424,23 +5154,17 @@ def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagR template_display_name="template_display_name_value", column="column_value", ) - response = client.create_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagRequest() # Establish that the response is the type that we expect. - assert isinstance(response, tags.Tag) - assert response.name == "name_value" - assert response.template == "template_value" - assert response.template_display_name == "template_display_name_value" @@ -5452,7 +5176,7 @@ def test_create_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5460,7 +5184,6 @@ def test_create_tag_empty_call(): client.create_tag() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagRequest() @@ -5469,7 +5192,7 @@ async def test_create_tag_async( transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5486,22 +5209,17 @@ async def test_create_tag_async( template_display_name="template_display_name_value", ) ) - response = await client.create_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.Tag) - assert response.name == "name_value" - assert response.template == "template_value" - assert response.template_display_name == "template_display_name_value" @@ -5511,17 +5229,17 @@ async def test_create_tag_async_from_dict(): def test_create_tag_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: call.return_value = tags.Tag() - client.create_tag(request) # Establish that the underlying gRPC stub method was called. 
@@ -5536,17 +5254,17 @@ def test_create_tag_field_headers(): @pytest.mark.asyncio async def test_create_tag_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) - await client.create_tag(request) # Establish that the underlying gRPC stub method was called. @@ -5560,13 +5278,12 @@ async def test_create_tag_field_headers_async(): def test_create_tag_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_tag( @@ -5577,14 +5294,12 @@ def test_create_tag_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].tag == tags.Tag(name="name_value") def test_create_tag_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5598,7 +5313,7 @@ def test_create_tag_flattened_error(): @pytest.mark.asyncio async def test_create_tag_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: @@ -5616,15 +5331,13 @@ async def test_create_tag_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].tag == tags.Tag(name="name_value") @pytest.mark.asyncio async def test_create_tag_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5638,7 +5351,7 @@ async def test_create_tag_flattened_error_async(): def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagRequest): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5654,23 +5367,17 @@ def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagR template_display_name="template_display_name_value", column="column_value", ) - response = client.update_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, tags.Tag) - assert response.name == "name_value" - assert response.template == "template_value" - assert response.template_display_name == "template_display_name_value" @@ -5682,7 +5389,7 @@ def test_update_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5690,7 +5397,6 @@ def test_update_tag_empty_call(): client.update_tag() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagRequest() @@ -5699,7 +5405,7 @@ async def test_update_tag_async( transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5716,22 +5422,17 @@ async def test_update_tag_async( template_display_name="template_display_name_value", ) ) - response = await client.update_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, tags.Tag) - assert response.name == "name_value" - assert response.template == "template_value" - assert response.template_display_name == "template_display_name_value" @@ -5741,17 +5442,17 @@ async def test_update_tag_async_from_dict(): def test_update_tag_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagRequest() + request.tag.name = "tag.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: call.return_value = tags.Tag() - client.update_tag(request) # Establish that the underlying gRPC stub method was called. @@ -5766,17 +5467,17 @@ def test_update_tag_field_headers(): @pytest.mark.asyncio async def test_update_tag_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagRequest() + request.tag.name = "tag.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) - await client.update_tag(request) # Establish that the underlying gRPC stub method was called. 
@@ -5790,32 +5491,29 @@ async def test_update_tag_field_headers_async(): def test_update_tag_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_tag( tag=tags.Tag(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].tag == tags.Tag(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_tag_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5823,13 +5521,13 @@ def test_update_tag_flattened_error(): client.update_tag( datacatalog.UpdateTagRequest(), tag=tags.Tag(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_tag_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: @@ -5841,22 +5539,20 @@ async def test_update_tag_flattened_async(): # using the keyword arguments to the method. response = await client.update_tag( tag=tags.Tag(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].tag == tags.Tag(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_tag_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5864,13 +5560,13 @@ async def test_update_tag_flattened_error_async(): await client.update_tag( datacatalog.UpdateTagRequest(), tag=tags.Tag(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_delete_tag(transport: str = "grpc", request_type=datacatalog.DeleteTagRequest): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5881,13 +5577,11 @@ def test_delete_tag(transport: str = "grpc", request_type=datacatalog.DeleteTagR with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagRequest() # Establish that the response is the type that we expect. @@ -5902,7 +5596,7 @@ def test_delete_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5910,7 +5604,6 @@ def test_delete_tag_empty_call(): client.delete_tag() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagRequest() @@ -5919,7 +5612,7 @@ async def test_delete_tag_async( transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5930,13 +5623,11 @@ async def test_delete_tag_async( with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagRequest() # Establish that the response is the type that we expect. @@ -5949,17 +5640,17 @@ async def test_delete_tag_async_from_dict(): def test_delete_tag_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: call.return_value = None - client.delete_tag(request) # Establish that the underlying gRPC stub method was called. 
@@ -5974,17 +5665,17 @@ def test_delete_tag_field_headers(): @pytest.mark.asyncio async def test_delete_tag_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. @@ -5998,13 +5689,12 @@ async def test_delete_tag_field_headers_async(): def test_delete_tag_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_tag(name="name_value",) @@ -6013,12 +5703,11 @@ def test_delete_tag_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_tag_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -6030,7 +5719,7 @@ def test_delete_tag_flattened_error(): @pytest.mark.asyncio async def test_delete_tag_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: @@ -6046,13 +5735,12 @@ async def test_delete_tag_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_tag_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -6064,7 +5752,7 @@ async def test_delete_tag_flattened_error_async(): def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsRequest): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6077,19 +5765,15 @@ def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsReq call.return_value = datacatalog.ListTagsResponse( next_page_token="next_page_token_value", ) - response = client.list_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListTagsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTagsPager) - assert response.next_page_token == "next_page_token_value" @@ -6101,7 +5785,7 @@ def test_list_tags_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6109,7 +5793,6 @@ def test_list_tags_empty_call(): client.list_tags() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListTagsRequest() @@ -6118,7 +5801,7 @@ async def test_list_tags_async( transport: str = "grpc_asyncio", request_type=datacatalog.ListTagsRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6131,18 +5814,15 @@ async def test_list_tags_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListTagsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListTagsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListTagsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -6152,17 +5832,17 @@ async def test_list_tags_async_from_dict(): def test_list_tags_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListTagsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: call.return_value = datacatalog.ListTagsResponse() - client.list_tags(request) # Establish that the underlying gRPC stub method was called. @@ -6177,11 +5857,12 @@ def test_list_tags_field_headers(): @pytest.mark.asyncio async def test_list_tags_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListTagsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -6189,7 +5870,6 @@ async def test_list_tags_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListTagsResponse() ) - await client.list_tags(request) # Establish that the underlying gRPC stub method was called. @@ -6203,13 +5883,12 @@ async def test_list_tags_field_headers_async(): def test_list_tags_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListTagsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_tags(parent="parent_value",) @@ -6218,12 +5897,11 @@ def test_list_tags_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_tags_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -6235,7 +5913,7 @@ def test_list_tags_flattened_error(): @pytest.mark.asyncio async def test_list_tags_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: @@ -6253,13 +5931,12 @@ async def test_list_tags_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_tags_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -6270,7 +5947,7 @@ async def test_list_tags_flattened_error_async(): def test_list_tags_pager(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: @@ -6299,7 +5976,7 @@ def test_list_tags_pager(): def test_list_tags_pages(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: @@ -6320,7 +5997,7 @@ def test_list_tags_pages(): @pytest.mark.asyncio async def test_list_tags_async_pager(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6348,7 +6025,7 @@ async def test_list_tags_async_pager(): @pytest.mark.asyncio async def test_list_tags_async_pages(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -6372,10 +6049,10 @@ async def test_list_tags_async_pages(): def test_set_iam_policy( - transport: str = "grpc", request_type=iam_policy.SetIamPolicyRequest + transport: str = "grpc", request_type=iam_policy_pb2.SetIamPolicyRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6385,22 +6062,17 @@ def test_set_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) - + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.SetIamPolicyRequest() + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() # Establish that the response is the type that we expect. - - assert isinstance(response, policy.Policy) - + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 - assert response.etag == b"etag_blob" @@ -6412,7 +6084,7 @@ def test_set_iam_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6420,16 +6092,15 @@ def test_set_iam_policy_empty_call(): client.set_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.SetIamPolicyRequest() + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() @pytest.mark.asyncio async def test_set_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6440,22 +6111,18 @@ async def test_set_iam_policy_async( with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) - response = await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.SetIamPolicyRequest() + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) - + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 - assert response.etag == b"etag_blob" @@ -6465,17 +6132,17 @@ async def test_set_iam_policy_async_from_dict(): def test_set_iam_policy_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -6490,17 +6157,17 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) - + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -6514,29 +6181,27 @@ async def test_set_iam_policy_field_headers_async(): def test_set_iam_policy_from_dict_foreign(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() response = client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy": policy_pb2.Policy(version=774), } ) call.assert_called() def test_set_iam_policy_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.set_iam_policy(resource="resource_value",) @@ -6545,31 +6210,30 @@ def test_set_iam_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].resource == "resource_value" def test_set_iam_policy_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.set_iam_policy( - iam_policy.SetIamPolicyRequest(), resource="resource_value", + iam_policy_pb2.SetIamPolicyRequest(), resource="resource_value", ) @pytest.mark.asyncio async def test_set_iam_policy_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.set_iam_policy(resource="resource_value",) @@ -6578,27 +6242,26 @@ async def test_set_iam_policy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].resource == "resource_value" @pytest.mark.asyncio async def test_set_iam_policy_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.set_iam_policy( - iam_policy.SetIamPolicyRequest(), resource="resource_value", + iam_policy_pb2.SetIamPolicyRequest(), resource="resource_value", ) def test_get_iam_policy( - transport: str = "grpc", request_type=iam_policy.GetIamPolicyRequest + transport: str = "grpc", request_type=iam_policy_pb2.GetIamPolicyRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6608,22 +6271,17 @@ def test_get_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy(version=774, etag=b"etag_blob",) - + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.GetIamPolicyRequest() + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() # Establish that the response is the type that we expect. - - assert isinstance(response, policy.Policy) - + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 - assert response.etag == b"etag_blob" @@ -6635,7 +6293,7 @@ def test_get_iam_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6643,16 +6301,15 @@ def test_get_iam_policy_empty_call(): client.get_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.GetIamPolicyRequest() + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() @pytest.mark.asyncio async def test_get_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6663,22 +6320,18 @@ async def test_get_iam_policy_async( with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) - response = await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.GetIamPolicyRequest() + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) - + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 - assert response.etag == b"etag_blob" @@ -6688,17 +6341,17 @@ async def test_get_iam_policy_async_from_dict(): def test_get_iam_policy_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -6713,17 +6366,17 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) - + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -6737,29 +6390,27 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict_foreign(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() response = client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() def test_get_iam_policy_flattened(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_iam_policy(resource="resource_value",) @@ -6768,31 +6419,30 @@ def test_get_iam_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].resource == "resource_value" def test_get_iam_policy_flattened_error(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_iam_policy( - iam_policy.GetIamPolicyRequest(), resource="resource_value", + iam_policy_pb2.GetIamPolicyRequest(), resource="resource_value", ) @pytest.mark.asyncio async def test_get_iam_policy_flattened_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_iam_policy(resource="resource_value",) @@ -6801,27 +6451,26 @@ async def test_get_iam_policy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].resource == "resource_value" @pytest.mark.asyncio async def test_get_iam_policy_flattened_error_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_iam_policy( - iam_policy.GetIamPolicyRequest(), resource="resource_value", + iam_policy_pb2.GetIamPolicyRequest(), resource="resource_value", ) def test_test_iam_permissions( - transport: str = "grpc", request_type=iam_policy.TestIamPermissionsRequest + transport: str = "grpc", request_type=iam_policy_pb2.TestIamPermissionsRequest ): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6833,22 +6482,18 @@ def test_test_iam_permissions( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = iam_policy.TestIamPermissionsResponse( + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( permissions=["permissions_value"], ) - response = client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.TestIamPermissionsRequest() + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() # Establish that the response is the type that we expect. - - assert isinstance(response, iam_policy.TestIamPermissionsResponse) - + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -6860,7 +6505,7 @@ def test_test_iam_permissions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6870,16 +6515,16 @@ def test_test_iam_permissions_empty_call(): client.test_iam_permissions() call.assert_called() _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.TestIamPermissionsRequest() + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() @pytest.mark.asyncio async def test_test_iam_permissions_async( - transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest + transport: str = "grpc_asyncio", + request_type=iam_policy_pb2.TestIamPermissionsRequest, ): client = DataCatalogAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6892,20 +6537,19 @@ async def test_test_iam_permissions_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) ) - response = await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.TestIamPermissionsRequest() + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy.TestIamPermissionsResponse) - + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -6915,19 +6559,19 @@ async def test_test_iam_permissions_async_from_dict(): def test_test_iam_permissions_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: - call.return_value = iam_policy.TestIamPermissionsResponse() - + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. @@ -6942,11 +6586,12 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): - client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6954,9 +6599,8 @@ async def test_test_iam_permissions_field_headers_async(): type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse() + iam_policy_pb2.TestIamPermissionsResponse() ) - await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. @@ -6970,14 +6614,13 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict_foreign(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy.TestIamPermissionsResponse() - + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() response = client.test_iam_permissions( request={ "resource": "resource_value", @@ -6990,16 +6633,16 @@ def test_test_iam_permissions_from_dict_foreign(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.DataCatalogGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.DataCatalogGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DataCatalogClient( @@ -7009,7 +6652,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.DataCatalogGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DataCatalogClient( @@ -7020,7 +6663,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.DataCatalogGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = DataCatalogClient(transport=transport) assert client.transport is transport @@ -7029,13 +6672,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.DataCatalogGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.DataCatalogGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -7047,23 +6690,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) + client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.DataCatalogGrpcTransport,) def test_data_catalog_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.DataCatalogTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -7075,7 +6718,7 @@ def test_data_catalog_base_transport(): ) as Transport: Transport.return_value = None transport = transports.DataCatalogTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -7114,15 +6757,37 @@ def test_data_catalog_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_data_catalog_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + 
transport = transports.DataCatalogTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_data_catalog_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DataCatalogTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -7135,19 +6800,33 @@ def test_data_catalog_base_transport_with_credentials_file(): def test_data_catalog_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DataCatalogTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_data_catalog_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataCatalogClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_data_catalog_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) DataCatalogClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -7155,26 +6834,156 @@ def test_data_catalog_auth_adc(): ) -def test_data_catalog_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport,], +) +@requires_google_auth_gte_1_25_0 +def test_data_catalog_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.DataCatalogGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport,], +) +@requires_google_auth_lt_1_25_0 +def test_data_catalog_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataCatalogGrpcTransport, grpc_helpers), + (transports.DataCatalogGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_data_catalog_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataCatalogGrpcTransport, grpc_helpers), + (transports.DataCatalogGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_data_catalog_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataCatalogGrpcTransport, grpc_helpers), + (transports.DataCatalogGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_data_catalog_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport], ) def test_data_catalog_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -7213,7 +7022,7 @@ def test_data_catalog_grpc_transport_client_cert_source_for_mtls(transport_class def test_data_catalog_host_no_port(): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datacatalog.googleapis.com" ), @@ -7223,7 +7032,7 @@ def test_data_catalog_host_no_port(): def test_data_catalog_host_with_port(): client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datacatalog.googleapis.com:8000" ), @@ -7274,9 +7083,9 @@ def test_data_catalog_transport_channel_mtls_with_client_cert_source(transport_c mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = 
ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -7352,7 +7161,6 @@ def test_entry_path(): location = "clam" entry_group = "whelk" entry = "octopus" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format( project=project, location=location, entry_group=entry_group, entry=entry, ) @@ -7378,7 +7186,6 @@ def test_entry_group_path(): project = "winkle" location = "nautilus" entry_group = "scallop" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format( project=project, location=location, entry_group=entry_group, ) @@ -7405,7 +7212,6 @@ def test_tag_path(): entry_group = "oyster" entry = "nudibranch" tag = "cuttlefish" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format( project=project, location=location, @@ -7436,7 +7242,6 @@ def test_tag_template_path(): project = "squid" location = "clam" tag_template = "whelk" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format( project=project, location=location, tag_template=tag_template, ) @@ -7462,7 +7267,6 @@ def test_tag_template_field_path(): location = "mussel" tag_template = "winkle" field = "nautilus" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format( project=project, location=location, tag_template=tag_template, field=field, ) @@ -7488,7 +7292,6 @@ def test_parse_tag_template_field_path(): def test_common_billing_account_path(): billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -7509,7 +7312,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "oyster" - 
expected = "folders/{folder}".format(folder=folder,) actual = DataCatalogClient.common_folder_path(folder) assert expected == actual @@ -7528,7 +7330,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization,) actual = DataCatalogClient.common_organization_path(organization) assert expected == actual @@ -7547,7 +7348,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "winkle" - expected = "projects/{project}".format(project=project,) actual = DataCatalogClient.common_project_path(project) assert expected == actual @@ -7567,7 +7367,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "scallop" location = "abalone" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -7594,7 +7393,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.DataCatalogTransport, "_prep_wrapped_messages" ) as prep: client = DataCatalogClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -7603,6 +7402,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = DataCatalogClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py index 4b073a35..2a2d94a1 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py +++ b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # 
Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,13 +23,13 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import ( PolicyTagManagerAsyncClient, @@ -40,13 +39,43 @@ ) from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import pagers from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import transports +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import options_pb2 as options # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.type import expr_pb2 as expr # type: ignore +from 
google.protobuf import field_mask_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -98,7 +127,7 @@ def test__get_default_mtls_endpoint(): "client_class", [PolicyTagManagerClient, PolicyTagManagerAsyncClient,] ) def test_policy_tag_manager_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -115,7 +144,7 @@ def test_policy_tag_manager_client_from_service_account_info(client_class): "client_class", [PolicyTagManagerClient, PolicyTagManagerAsyncClient,] ) def test_policy_tag_manager_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -168,7 
+197,7 @@ def test_policy_tag_manager_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(PolicyTagManagerClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -466,7 +495,7 @@ def test_create_taxonomy( transport: str = "grpc", request_type=policytagmanager.CreateTaxonomyRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -484,25 +513,18 @@ def test_create_taxonomy( policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL ], ) - response = client.create_taxonomy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreateTaxonomyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.activated_policy_types == [ policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL ] @@ -516,7 +538,7 @@ def test_create_taxonomy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -524,7 +546,6 @@ def test_create_taxonomy_empty_call(): client.create_taxonomy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreateTaxonomyRequest() @@ -533,7 +554,7 @@ async def test_create_taxonomy_async( transport: str = "grpc_asyncio", request_type=policytagmanager.CreateTaxonomyRequest ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -553,24 +574,18 @@ async def test_create_taxonomy_async( ], ) ) - response = await client.create_taxonomy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreateTaxonomyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.activated_policy_types == [ policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL ] @@ -582,17 +597,17 @@ async def test_create_taxonomy_async_from_dict(): def test_create_taxonomy_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.CreateTaxonomyRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: call.return_value = policytagmanager.Taxonomy() - client.create_taxonomy(request) # Establish that the underlying gRPC stub method was called. @@ -608,12 +623,13 @@ def test_create_taxonomy_field_headers(): @pytest.mark.asyncio async def test_create_taxonomy_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.CreateTaxonomyRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -621,7 +637,6 @@ async def test_create_taxonomy_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.Taxonomy() ) - await client.create_taxonomy(request) # Establish that the underlying gRPC stub method was called. @@ -635,13 +650,12 @@ async def test_create_taxonomy_field_headers_async(): def test_create_taxonomy_flattened(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.Taxonomy() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_taxonomy( @@ -653,14 +667,12 @@ def test_create_taxonomy_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].taxonomy == policytagmanager.Taxonomy(name="name_value") def test_create_taxonomy_flattened_error(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -675,7 +687,7 @@ def test_create_taxonomy_flattened_error(): @pytest.mark.asyncio async def test_create_taxonomy_flattened_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -697,16 +709,14 @@ async def test_create_taxonomy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].taxonomy == policytagmanager.Taxonomy(name="name_value") @pytest.mark.asyncio async def test_create_taxonomy_flattened_error_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -723,7 +733,7 @@ def test_delete_taxonomy( transport: str = "grpc", request_type=policytagmanager.DeleteTaxonomyRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -734,13 +744,11 @@ def test_delete_taxonomy( with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = None - response = client.delete_taxonomy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeleteTaxonomyRequest() # Establish that the response is the type that we expect. @@ -755,7 +763,7 @@ def test_delete_taxonomy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -763,7 +771,6 @@ def test_delete_taxonomy_empty_call(): client.delete_taxonomy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeleteTaxonomyRequest() @@ -772,7 +779,7 @@ async def test_delete_taxonomy_async( transport: str = "grpc_asyncio", request_type=policytagmanager.DeleteTaxonomyRequest ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -783,13 +790,11 @@ async def test_delete_taxonomy_async( with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_taxonomy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeleteTaxonomyRequest() # Establish that the response is the type that we expect. 
@@ -802,17 +807,17 @@ async def test_delete_taxonomy_async_from_dict(): def test_delete_taxonomy_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.DeleteTaxonomyRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: call.return_value = None - client.delete_taxonomy(request) # Establish that the underlying gRPC stub method was called. @@ -828,18 +833,18 @@ def test_delete_taxonomy_field_headers(): @pytest.mark.asyncio async def test_delete_taxonomy_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.DeleteTaxonomyRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_taxonomy(request) # Establish that the underlying gRPC stub method was called. @@ -853,13 +858,12 @@ async def test_delete_taxonomy_field_headers_async(): def test_delete_taxonomy_flattened(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_taxonomy(name="name_value",) @@ -868,12 +872,11 @@ def test_delete_taxonomy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_taxonomy_flattened_error(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -886,7 +889,7 @@ def test_delete_taxonomy_flattened_error(): @pytest.mark.asyncio async def test_delete_taxonomy_flattened_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -903,14 +906,13 @@ async def test_delete_taxonomy_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_taxonomy_flattened_error_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -925,7 +927,7 @@ def test_update_taxonomy( transport: str = "grpc", request_type=policytagmanager.UpdateTaxonomyRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -943,25 +945,18 @@ def test_update_taxonomy( policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL ], ) - response = client.update_taxonomy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdateTaxonomyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.activated_policy_types == [ policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL ] @@ -975,7 +970,7 @@ def test_update_taxonomy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -983,7 +978,6 @@ def test_update_taxonomy_empty_call(): client.update_taxonomy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdateTaxonomyRequest() @@ -992,7 +986,7 @@ async def test_update_taxonomy_async( transport: str = "grpc_asyncio", request_type=policytagmanager.UpdateTaxonomyRequest ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1012,24 +1006,18 @@ async def test_update_taxonomy_async( ], ) ) - response = await client.update_taxonomy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdateTaxonomyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.activated_policy_types == [ policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL ] @@ -1041,17 +1029,17 @@ async def test_update_taxonomy_async_from_dict(): def test_update_taxonomy_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.UpdateTaxonomyRequest() + request.taxonomy.name = "taxonomy.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: call.return_value = policytagmanager.Taxonomy() - client.update_taxonomy(request) # Establish that the underlying gRPC stub method was called. @@ -1069,12 +1057,13 @@ def test_update_taxonomy_field_headers(): @pytest.mark.asyncio async def test_update_taxonomy_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.UpdateTaxonomyRequest() + request.taxonomy.name = "taxonomy.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1082,7 +1071,6 @@ async def test_update_taxonomy_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.Taxonomy() ) - await client.update_taxonomy(request) # Establish that the underlying gRPC stub method was called. @@ -1098,13 +1086,12 @@ async def test_update_taxonomy_field_headers_async(): def test_update_taxonomy_flattened(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.Taxonomy() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_taxonomy(taxonomy=policytagmanager.Taxonomy(name="name_value"),) @@ -1113,12 +1100,11 @@ def test_update_taxonomy_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].taxonomy == policytagmanager.Taxonomy(name="name_value") def test_update_taxonomy_flattened_error(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1132,7 +1118,7 @@ def test_update_taxonomy_flattened_error(): @pytest.mark.asyncio async def test_update_taxonomy_flattened_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1153,14 +1139,13 @@ async def test_update_taxonomy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].taxonomy == policytagmanager.Taxonomy(name="name_value") @pytest.mark.asyncio async def test_update_taxonomy_flattened_error_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1176,7 +1161,7 @@ def test_list_taxonomies( transport: str = "grpc", request_type=policytagmanager.ListTaxonomiesRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1189,19 +1174,15 @@ def test_list_taxonomies( call.return_value = policytagmanager.ListTaxonomiesResponse( next_page_token="next_page_token_value", ) - response = client.list_taxonomies(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListTaxonomiesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTaxonomiesPager) - assert response.next_page_token == "next_page_token_value" @@ -1213,7 +1194,7 @@ def test_list_taxonomies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1221,7 +1202,6 @@ def test_list_taxonomies_empty_call(): client.list_taxonomies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListTaxonomiesRequest() @@ -1230,7 +1210,7 @@ async def test_list_taxonomies_async( transport: str = "grpc_asyncio", request_type=policytagmanager.ListTaxonomiesRequest ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1245,18 +1225,15 @@ async def test_list_taxonomies_async( next_page_token="next_page_token_value", ) ) - response = await client.list_taxonomies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListTaxonomiesRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListTaxonomiesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1266,17 +1243,17 @@ async def test_list_taxonomies_async_from_dict(): def test_list_taxonomies_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.ListTaxonomiesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: call.return_value = policytagmanager.ListTaxonomiesResponse() - client.list_taxonomies(request) # Establish that the underlying gRPC stub method was called. @@ -1292,12 +1269,13 @@ def test_list_taxonomies_field_headers(): @pytest.mark.asyncio async def test_list_taxonomies_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.ListTaxonomiesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1305,7 +1283,6 @@ async def test_list_taxonomies_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.ListTaxonomiesResponse() ) - await client.list_taxonomies(request) # Establish that the underlying gRPC stub method was called. 
@@ -1319,13 +1296,12 @@ async def test_list_taxonomies_field_headers_async(): def test_list_taxonomies_flattened(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.ListTaxonomiesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_taxonomies(parent="parent_value",) @@ -1334,12 +1310,11 @@ def test_list_taxonomies_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_taxonomies_flattened_error(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1352,7 +1327,7 @@ def test_list_taxonomies_flattened_error(): @pytest.mark.asyncio async def test_list_taxonomies_flattened_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1371,14 +1346,13 @@ async def test_list_taxonomies_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_taxonomies_flattened_error_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1390,7 +1364,7 @@ async def test_list_taxonomies_flattened_error_async(): def test_list_taxonomies_pager(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: @@ -1430,7 +1404,7 @@ def test_list_taxonomies_pager(): def test_list_taxonomies_pages(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: @@ -1462,7 +1436,9 @@ def test_list_taxonomies_pages(): @pytest.mark.asyncio async def test_list_taxonomies_async_pager(): - client = PolicyTagManagerAsyncClient(credentials=credentials.AnonymousCredentials,) + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1501,7 +1477,9 @@ async def test_list_taxonomies_async_pager(): @pytest.mark.asyncio async def test_list_taxonomies_async_pages(): - client = PolicyTagManagerAsyncClient(credentials=credentials.AnonymousCredentials,) + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1539,7 +1517,7 @@ def test_get_taxonomy( transport: str = "grpc", request_type=policytagmanager.GetTaxonomyRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1557,25 +1535,18 @@ def test_get_taxonomy( policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL ], ) - response = client.get_taxonomy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetTaxonomyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.activated_policy_types == [ policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL ] @@ -1589,7 +1560,7 @@ def test_get_taxonomy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1597,7 +1568,6 @@ def test_get_taxonomy_empty_call(): client.get_taxonomy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetTaxonomyRequest() @@ -1606,7 +1576,7 @@ async def test_get_taxonomy_async( transport: str = "grpc_asyncio", request_type=policytagmanager.GetTaxonomyRequest ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1626,24 +1596,18 @@ async def test_get_taxonomy_async( ], ) ) - response = await client.get_taxonomy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetTaxonomyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.activated_policy_types == [ policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL ] @@ -1655,17 +1619,17 @@ async def test_get_taxonomy_async_from_dict(): def test_get_taxonomy_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.GetTaxonomyRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: call.return_value = policytagmanager.Taxonomy() - client.get_taxonomy(request) # Establish that the underlying gRPC stub method was called. @@ -1681,12 +1645,13 @@ def test_get_taxonomy_field_headers(): @pytest.mark.asyncio async def test_get_taxonomy_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.GetTaxonomyRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1694,7 +1659,6 @@ async def test_get_taxonomy_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.Taxonomy() ) - await client.get_taxonomy(request) # Establish that the underlying gRPC stub method was called. @@ -1708,13 +1672,12 @@ async def test_get_taxonomy_field_headers_async(): def test_get_taxonomy_flattened(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.Taxonomy() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_taxonomy(name="name_value",) @@ -1723,12 +1686,11 @@ def test_get_taxonomy_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_taxonomy_flattened_error(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1741,7 +1703,7 @@ def test_get_taxonomy_flattened_error(): @pytest.mark.asyncio async def test_get_taxonomy_flattened_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1760,14 +1722,13 @@ async def test_get_taxonomy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_taxonomy_flattened_error_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1782,7 +1743,7 @@ def test_create_policy_tag( transport: str = "grpc", request_type=policytagmanager.CreatePolicyTagRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1801,27 +1762,19 @@ def test_create_policy_tag( parent_policy_tag="parent_policy_tag_value", child_policy_tags=["child_policy_tags_value"], ) - response = client.create_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreatePolicyTagRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.parent_policy_tag == "parent_policy_tag_value" - assert response.child_policy_tags == ["child_policy_tags_value"] @@ -1833,7 +1786,7 @@ def test_create_policy_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1843,7 +1796,6 @@ def test_create_policy_tag_empty_call(): client.create_policy_tag() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreatePolicyTagRequest() @@ -1853,7 +1805,7 @@ async def test_create_policy_tag_async( request_type=policytagmanager.CreatePolicyTagRequest, ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1874,26 +1826,19 @@ async def test_create_policy_tag_async( child_policy_tags=["child_policy_tags_value"], ) ) - response = await client.create_policy_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreatePolicyTagRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.parent_policy_tag == "parent_policy_tag_value" - assert response.child_policy_tags == ["child_policy_tags_value"] @@ -1903,11 +1848,12 @@ async def test_create_policy_tag_async_from_dict(): def test_create_policy_tag_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.CreatePolicyTagRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1915,7 +1861,6 @@ def test_create_policy_tag_field_headers(): type(client.transport.create_policy_tag), "__call__" ) as call: call.return_value = policytagmanager.PolicyTag() - client.create_policy_tag(request) # Establish that the underlying gRPC stub method was called. @@ -1931,12 +1876,13 @@ def test_create_policy_tag_field_headers(): @pytest.mark.asyncio async def test_create_policy_tag_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.CreatePolicyTagRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1946,7 +1892,6 @@ async def test_create_policy_tag_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.PolicyTag() ) - await client.create_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
@@ -1960,7 +1905,7 @@ async def test_create_policy_tag_field_headers_async(): def test_create_policy_tag_flattened(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1968,7 +1913,6 @@ def test_create_policy_tag_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.PolicyTag() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_policy_tag( @@ -1980,14 +1924,12 @@ def test_create_policy_tag_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].policy_tag == policytagmanager.PolicyTag(name="name_value") def test_create_policy_tag_flattened_error(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2002,7 +1944,7 @@ def test_create_policy_tag_flattened_error(): @pytest.mark.asyncio async def test_create_policy_tag_flattened_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2026,16 +1968,14 @@ async def test_create_policy_tag_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].policy_tag == policytagmanager.PolicyTag(name="name_value") @pytest.mark.asyncio async def test_create_policy_tag_flattened_error_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2052,7 +1992,7 @@ def test_delete_policy_tag( transport: str = "grpc", request_type=policytagmanager.DeletePolicyTagRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2065,13 +2005,11 @@ def test_delete_policy_tag( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_policy_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeletePolicyTagRequest() # Establish that the response is the type that we expect. @@ -2086,7 +2024,7 @@ def test_delete_policy_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2096,7 +2034,6 @@ def test_delete_policy_tag_empty_call(): client.delete_policy_tag() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeletePolicyTagRequest() @@ -2106,7 +2043,7 @@ async def test_delete_policy_tag_async( request_type=policytagmanager.DeletePolicyTagRequest, ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2119,13 +2056,11 @@ async def test_delete_policy_tag_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_policy_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeletePolicyTagRequest() # Establish that the response is the type that we expect. @@ -2138,11 +2073,12 @@ async def test_delete_policy_tag_async_from_dict(): def test_delete_policy_tag_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.DeletePolicyTagRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2150,7 +2086,6 @@ def test_delete_policy_tag_field_headers(): type(client.transport.delete_policy_tag), "__call__" ) as call: call.return_value = None - client.delete_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
@@ -2166,12 +2101,13 @@ def test_delete_policy_tag_field_headers(): @pytest.mark.asyncio async def test_delete_policy_tag_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.DeletePolicyTagRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2179,7 +2115,6 @@ async def test_delete_policy_tag_field_headers_async(): type(client.transport.delete_policy_tag), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_policy_tag(request) # Establish that the underlying gRPC stub method was called. @@ -2193,7 +2128,7 @@ async def test_delete_policy_tag_field_headers_async(): def test_delete_policy_tag_flattened(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2201,7 +2136,6 @@ def test_delete_policy_tag_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_policy_tag(name="name_value",) @@ -2210,12 +2144,11 @@ def test_delete_policy_tag_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_policy_tag_flattened_error(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2228,7 +2161,7 @@ def test_delete_policy_tag_flattened_error(): @pytest.mark.asyncio async def test_delete_policy_tag_flattened_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2247,14 +2180,13 @@ async def test_delete_policy_tag_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_policy_tag_flattened_error_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2269,7 +2201,7 @@ def test_update_policy_tag( transport: str = "grpc", request_type=policytagmanager.UpdatePolicyTagRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2288,27 +2220,19 @@ def test_update_policy_tag( parent_policy_tag="parent_policy_tag_value", child_policy_tags=["child_policy_tags_value"], ) - response = client.update_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdatePolicyTagRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.parent_policy_tag == "parent_policy_tag_value" - assert response.child_policy_tags == ["child_policy_tags_value"] @@ -2320,7 +2244,7 @@ def test_update_policy_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2330,7 +2254,6 @@ def test_update_policy_tag_empty_call(): client.update_policy_tag() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdatePolicyTagRequest() @@ -2340,7 +2263,7 @@ async def test_update_policy_tag_async( request_type=policytagmanager.UpdatePolicyTagRequest, ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2361,26 +2284,19 @@ async def test_update_policy_tag_async( child_policy_tags=["child_policy_tags_value"], ) ) - response = await client.update_policy_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdatePolicyTagRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.parent_policy_tag == "parent_policy_tag_value" - assert response.child_policy_tags == ["child_policy_tags_value"] @@ -2390,11 +2306,12 @@ async def test_update_policy_tag_async_from_dict(): def test_update_policy_tag_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.UpdatePolicyTagRequest() + request.policy_tag.name = "policy_tag.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2402,7 +2319,6 @@ def test_update_policy_tag_field_headers(): type(client.transport.update_policy_tag), "__call__" ) as call: call.return_value = policytagmanager.PolicyTag() - client.update_policy_tag(request) # Establish that the underlying gRPC stub method was called. @@ -2420,12 +2336,13 @@ def test_update_policy_tag_field_headers(): @pytest.mark.asyncio async def test_update_policy_tag_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.UpdatePolicyTagRequest() + request.policy_tag.name = "policy_tag.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2435,7 +2352,6 @@ async def test_update_policy_tag_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.PolicyTag() ) - await client.update_policy_tag(request) # Establish that the underlying gRPC stub method was called. @@ -2451,7 +2367,7 @@ async def test_update_policy_tag_field_headers_async(): def test_update_policy_tag_flattened(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2459,7 +2375,6 @@ def test_update_policy_tag_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.PolicyTag() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_policy_tag( @@ -2470,12 +2385,11 @@ def test_update_policy_tag_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].policy_tag == policytagmanager.PolicyTag(name="name_value") def test_update_policy_tag_flattened_error(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2489,7 +2403,7 @@ def test_update_policy_tag_flattened_error(): @pytest.mark.asyncio async def test_update_policy_tag_flattened_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2512,14 +2426,13 @@ async def test_update_policy_tag_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].policy_tag == policytagmanager.PolicyTag(name="name_value") @pytest.mark.asyncio async def test_update_policy_tag_flattened_error_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2535,7 +2448,7 @@ def test_list_policy_tags( transport: str = "grpc", request_type=policytagmanager.ListPolicyTagsRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2548,19 +2461,15 @@ def test_list_policy_tags( call.return_value = policytagmanager.ListPolicyTagsResponse( next_page_token="next_page_token_value", ) - response = client.list_policy_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListPolicyTagsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPolicyTagsPager) - assert response.next_page_token == "next_page_token_value" @@ -2572,7 +2481,7 @@ def test_list_policy_tags_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2580,7 +2489,6 @@ def test_list_policy_tags_empty_call(): client.list_policy_tags() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListPolicyTagsRequest() @@ -2589,7 +2497,7 @@ async def test_list_policy_tags_async( transport: str = "grpc_asyncio", request_type=policytagmanager.ListPolicyTagsRequest ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2604,18 +2512,15 @@ async def test_list_policy_tags_async( next_page_token="next_page_token_value", ) ) - response = await client.list_policy_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListPolicyTagsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPolicyTagsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -2625,17 +2530,17 @@ async def test_list_policy_tags_async_from_dict(): def test_list_policy_tags_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.ListPolicyTagsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: call.return_value = policytagmanager.ListPolicyTagsResponse() - client.list_policy_tags(request) # Establish that the underlying gRPC stub method was called. 
@@ -2651,12 +2556,13 @@ def test_list_policy_tags_field_headers(): @pytest.mark.asyncio async def test_list_policy_tags_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.ListPolicyTagsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2664,7 +2570,6 @@ async def test_list_policy_tags_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.ListPolicyTagsResponse() ) - await client.list_policy_tags(request) # Establish that the underlying gRPC stub method was called. @@ -2678,13 +2583,12 @@ async def test_list_policy_tags_field_headers_async(): def test_list_policy_tags_flattened(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.ListPolicyTagsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_policy_tags(parent="parent_value",) @@ -2693,12 +2597,11 @@ def test_list_policy_tags_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_policy_tags_flattened_error(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2711,7 +2614,7 @@ def test_list_policy_tags_flattened_error(): @pytest.mark.asyncio async def test_list_policy_tags_flattened_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2730,14 +2633,13 @@ async def test_list_policy_tags_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_policy_tags_flattened_error_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2749,7 +2651,7 @@ async def test_list_policy_tags_flattened_error_async(): def test_list_policy_tags_pager(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: @@ -2792,7 +2694,7 @@ def test_list_policy_tags_pager(): def test_list_policy_tags_pages(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: @@ -2827,7 +2729,9 @@ def test_list_policy_tags_pages(): @pytest.mark.asyncio async def test_list_policy_tags_async_pager(): - client = PolicyTagManagerAsyncClient(credentials=credentials.AnonymousCredentials,) + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2869,7 +2773,9 @@ async def test_list_policy_tags_async_pager(): @pytest.mark.asyncio async def test_list_policy_tags_async_pages(): - client = PolicyTagManagerAsyncClient(credentials=credentials.AnonymousCredentials,) + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2910,7 +2816,7 @@ def test_get_policy_tag( transport: str = "grpc", request_type=policytagmanager.GetPolicyTagRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2927,27 +2833,19 @@ def test_get_policy_tag( parent_policy_tag="parent_policy_tag_value", child_policy_tags=["child_policy_tags_value"], ) - response = client.get_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetPolicyTagRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.parent_policy_tag == "parent_policy_tag_value" - assert response.child_policy_tags == ["child_policy_tags_value"] @@ -2959,7 +2857,7 @@ def test_get_policy_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2967,7 +2865,6 @@ def test_get_policy_tag_empty_call(): client.get_policy_tag() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetPolicyTagRequest() @@ -2976,7 +2873,7 @@ async def test_get_policy_tag_async( transport: str = "grpc_asyncio", request_type=policytagmanager.GetPolicyTagRequest ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2995,26 +2892,19 @@ async def test_get_policy_tag_async( child_policy_tags=["child_policy_tags_value"], ) ) - response = await client.get_policy_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetPolicyTagRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.parent_policy_tag == "parent_policy_tag_value" - assert response.child_policy_tags == ["child_policy_tags_value"] @@ -3024,17 +2914,17 @@ async def test_get_policy_tag_async_from_dict(): def test_get_policy_tag_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.GetPolicyTagRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: call.return_value = policytagmanager.PolicyTag() - client.get_policy_tag(request) # Establish that the underlying gRPC stub method was called. @@ -3050,12 +2940,13 @@ def test_get_policy_tag_field_headers(): @pytest.mark.asyncio async def test_get_policy_tag_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanager.GetPolicyTagRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3063,7 +2954,6 @@ async def test_get_policy_tag_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.PolicyTag() ) - await client.get_policy_tag(request) # Establish that the underlying gRPC stub method was called. 
@@ -3077,13 +2967,12 @@ async def test_get_policy_tag_field_headers_async(): def test_get_policy_tag_flattened(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.PolicyTag() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_policy_tag(name="name_value",) @@ -3092,12 +2981,11 @@ def test_get_policy_tag_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_policy_tag_flattened_error(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3110,7 +2998,7 @@ def test_get_policy_tag_flattened_error(): @pytest.mark.asyncio async def test_get_policy_tag_flattened_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3129,14 +3017,13 @@ async def test_get_policy_tag_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_policy_tag_flattened_error_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3148,10 +3035,10 @@ async def test_get_policy_tag_flattened_error_async(): def test_get_iam_policy( - transport: str = "grpc", request_type=iam_policy.GetIamPolicyRequest + transport: str = "grpc", request_type=iam_policy_pb2.GetIamPolicyRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3161,22 +3048,17 @@ def test_get_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) - + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.GetIamPolicyRequest() + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() # Establish that the response is the type that we expect. - - assert isinstance(response, policy.Policy) - + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 - assert response.etag == b"etag_blob" @@ -3188,7 +3070,7 @@ def test_get_iam_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3196,16 +3078,15 @@ def test_get_iam_policy_empty_call(): client.get_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.GetIamPolicyRequest() + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() @pytest.mark.asyncio async def test_get_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3216,22 +3097,18 @@ async def test_get_iam_policy_async( with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) - response = await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.GetIamPolicyRequest() + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, policy.Policy) - + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 - assert response.etag == b"etag_blob" @@ -3241,17 +3118,17 @@ async def test_get_iam_policy_async_from_dict(): def test_get_iam_policy_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -3267,18 +3144,18 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) - + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
@@ -3292,26 +3169,25 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict_foreign(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() response = client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() def test_set_iam_policy( - transport: str = "grpc", request_type=iam_policy.SetIamPolicyRequest + transport: str = "grpc", request_type=iam_policy_pb2.SetIamPolicyRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3321,22 +3197,17 @@ def test_set_iam_policy( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) - + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.SetIamPolicyRequest() + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() # Establish that the response is the type that we expect. 
- - assert isinstance(response, policy.Policy) - + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 - assert response.etag == b"etag_blob" @@ -3348,7 +3219,7 @@ def test_set_iam_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3356,16 +3227,15 @@ def test_set_iam_policy_empty_call(): client.set_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.SetIamPolicyRequest() + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() @pytest.mark.asyncio async def test_set_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3376,22 +3246,18 @@ async def test_set_iam_policy_async( with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) - response = await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.SetIamPolicyRequest() + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) - + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 - assert response.etag == b"etag_blob" @@ -3401,17 +3267,17 @@ async def test_set_iam_policy_async_from_dict(): def test_set_iam_policy_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -3427,18 +3293,18 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) - + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -3452,26 +3318,25 @@ async def test_set_iam_policy_field_headers_async(): def test_set_iam_policy_from_dict_foreign(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() - + call.return_value = policy_pb2.Policy() response = client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy": policy_pb2.Policy(version=774), } ) call.assert_called() def test_test_iam_permissions( - transport: str = "grpc", request_type=iam_policy.TestIamPermissionsRequest + transport: str = "grpc", request_type=iam_policy_pb2.TestIamPermissionsRequest ): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3483,22 +3348,18 @@ def test_test_iam_permissions( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy.TestIamPermissionsResponse( + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( permissions=["permissions_value"], ) - response = client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.TestIamPermissionsRequest() + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() # Establish that the response is the type that we expect. - - assert isinstance(response, iam_policy.TestIamPermissionsResponse) - + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -3510,7 +3371,7 @@ def test_test_iam_permissions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3520,16 +3381,16 @@ def test_test_iam_permissions_empty_call(): client.test_iam_permissions() call.assert_called() _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.TestIamPermissionsRequest() + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() @pytest.mark.asyncio async def test_test_iam_permissions_async( - transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest + transport: str = "grpc_asyncio", + request_type=iam_policy_pb2.TestIamPermissionsRequest, ): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3542,20 +3403,19 @@ async def test_test_iam_permissions_async( ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) ) - response = await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - - assert args[0] == iam_policy.TestIamPermissionsRequest() + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy.TestIamPermissionsResponse) - + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -3565,19 +3425,19 @@ async def test_test_iam_permissions_async_from_dict(): def test_test_iam_permissions_field_headers(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: - call.return_value = iam_policy.TestIamPermissionsResponse() - + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. 
@@ -3593,12 +3453,13 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = PolicyTagManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3606,9 +3467,8 @@ async def test_test_iam_permissions_field_headers_async(): type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse() + iam_policy_pb2.TestIamPermissionsResponse() ) - await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. @@ -3622,14 +3482,13 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict_foreign(): - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy.TestIamPermissionsResponse() - + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() response = client.test_iam_permissions( request={ "resource": "resource_value", @@ -3642,16 +3501,16 @@ def test_test_iam_permissions_from_dict_foreign(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.PolicyTagManagerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.PolicyTagManagerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = PolicyTagManagerClient( @@ -3661,7 +3520,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.PolicyTagManagerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = PolicyTagManagerClient( @@ -3672,7 +3531,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.PolicyTagManagerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = PolicyTagManagerClient(transport=transport) assert client.transport is transport @@ -3681,13 +3540,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.PolicyTagManagerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.PolicyTagManagerGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -3702,23 +3561,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) + client = PolicyTagManagerClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.PolicyTagManagerGrpcTransport,) def test_policy_tag_manager_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.PolicyTagManagerTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -3730,7 +3589,7 @@ def test_policy_tag_manager_base_transport(): ) as Transport: Transport.return_value = None transport = transports.PolicyTagManagerTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -3755,15 +3614,37 @@ def 
test_policy_tag_manager_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_policy_tag_manager_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_policy_tag_manager_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.PolicyTagManagerTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -3776,19 +3657,33 @@ def test_policy_tag_manager_base_transport_with_credentials_file(): def test_policy_tag_manager_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.PolicyTagManagerTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_policy_tag_manager_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PolicyTagManagerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_policy_tag_manager_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) PolicyTagManagerClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -3796,20 +3691,156 @@ def test_policy_tag_manager_auth_adc(): ) -def test_policy_tag_manager_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_policy_tag_manager_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.PolicyTagManagerGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_policy_tag_manager_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerGrpcTransport, grpc_helpers), + (transports.PolicyTagManagerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_policy_tag_manager_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerGrpcTransport, grpc_helpers), + (transports.PolicyTagManagerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_policy_tag_manager_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerGrpcTransport, grpc_helpers), + (transports.PolicyTagManagerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_policy_tag_manager_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -3818,7 +3849,7 @@ def test_policy_tag_manager_transport_auth_adc(): ], ) def test_policy_tag_manager_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -3857,7 +3888,7 @@ def test_policy_tag_manager_grpc_transport_client_cert_source_for_mtls(transport def test_policy_tag_manager_host_no_port(): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datacatalog.googleapis.com" ), @@ -3867,7 +3898,7 @@ def test_policy_tag_manager_host_no_port(): def test_policy_tag_manager_host_with_port(): client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datacatalog.googleapis.com:8000" ), @@ -3923,9 +3954,9 @@ def test_policy_tag_manager_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = 
credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -4004,7 +4035,6 @@ def test_policy_tag_path(): location = "clam" taxonomy = "whelk" policy_tag = "octopus" - expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format( project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, ) @@ -4032,7 +4062,6 @@ def test_taxonomy_path(): project = "winkle" location = "nautilus" taxonomy = "scallop" - expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( project=project, location=location, taxonomy=taxonomy, ) @@ -4055,7 +4084,6 @@ def test_parse_taxonomy_path(): def test_common_billing_account_path(): billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -4076,7 +4104,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "oyster" - expected = "folders/{folder}".format(folder=folder,) actual = PolicyTagManagerClient.common_folder_path(folder) assert expected == actual @@ -4095,7 +4122,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization,) actual = PolicyTagManagerClient.common_organization_path(organization) assert expected == actual @@ -4114,7 +4140,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "winkle" - expected = "projects/{project}".format(project=project,) actual = PolicyTagManagerClient.common_project_path(project) assert expected == actual @@ -4134,7 +4159,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = 
"scallop" location = "abalone" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -4161,7 +4185,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.PolicyTagManagerTransport, "_prep_wrapped_messages" ) as prep: client = PolicyTagManagerClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -4170,6 +4194,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = PolicyTagManagerClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py index 8a3ccf97..cd07138f 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py +++ b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,13 +23,13 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization import ( PolicyTagManagerSerializationAsyncClient, @@ -41,9 +40,39 @@ from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization import ( transports, ) +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.datacatalog_v1beta1.types import policytagmanager from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization from google.oauth2 import service_account +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -102,7 +131,7 @@ def test__get_default_mtls_endpoint(): def test_policy_tag_manager_serialization_client_from_service_account_info( client_class, ): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -122,7 +151,7 @@ def test_policy_tag_manager_serialization_client_from_service_account_info( def test_policy_tag_manager_serialization_client_from_service_account_file( client_class, ): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -181,7 +210,7 @@ def test_policy_tag_manager_serialization_client_client_options( with mock.patch.object( PolicyTagManagerSerializationClient, "get_transport_class" ) as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -490,7 +519,7 @@ def test_import_taxonomies( 
request_type=policytagmanagerserialization.ImportTaxonomiesRequest, ): client = PolicyTagManagerSerializationClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -503,17 +532,14 @@ def test_import_taxonomies( ) as call: # Designate an appropriate return value for the call. call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse() - response = client.import_taxonomies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) @@ -525,7 +551,7 @@ def test_import_taxonomies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PolicyTagManagerSerializationClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -535,7 +561,6 @@ def test_import_taxonomies_empty_call(): client.import_taxonomies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() @@ -545,7 +570,7 @@ async def test_import_taxonomies_async( request_type=policytagmanagerserialization.ImportTaxonomiesRequest, ): client = PolicyTagManagerSerializationAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -560,13 +585,11 @@ async def test_import_taxonomies_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanagerserialization.ImportTaxonomiesResponse() ) - response = await client.import_taxonomies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() # Establish that the response is the type that we expect. @@ -580,12 +603,13 @@ async def test_import_taxonomies_async_from_dict(): def test_import_taxonomies_field_headers(): client = PolicyTagManagerSerializationClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanagerserialization.ImportTaxonomiesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -593,7 +617,6 @@ def test_import_taxonomies_field_headers(): type(client.transport.import_taxonomies), "__call__" ) as call: call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse() - client.import_taxonomies(request) # Establish that the underlying gRPC stub method was called. 
@@ -609,12 +632,13 @@ def test_import_taxonomies_field_headers(): @pytest.mark.asyncio async def test_import_taxonomies_field_headers_async(): client = PolicyTagManagerSerializationAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanagerserialization.ImportTaxonomiesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -624,7 +648,6 @@ async def test_import_taxonomies_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanagerserialization.ImportTaxonomiesResponse() ) - await client.import_taxonomies(request) # Establish that the underlying gRPC stub method was called. @@ -642,7 +665,7 @@ def test_export_taxonomies( request_type=policytagmanagerserialization.ExportTaxonomiesRequest, ): client = PolicyTagManagerSerializationClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -655,17 +678,14 @@ def test_export_taxonomies( ) as call: # Designate an appropriate return value for the call. call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse() - response = client.export_taxonomies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) @@ -677,7 +697,7 @@ def test_export_taxonomies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = PolicyTagManagerSerializationClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -687,7 +707,6 @@ def test_export_taxonomies_empty_call(): client.export_taxonomies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() @@ -697,7 +716,7 @@ async def test_export_taxonomies_async( request_type=policytagmanagerserialization.ExportTaxonomiesRequest, ): client = PolicyTagManagerSerializationAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -712,13 +731,11 @@ async def test_export_taxonomies_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanagerserialization.ExportTaxonomiesResponse() ) - response = await client.export_taxonomies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() # Establish that the response is the type that we expect. @@ -732,12 +749,13 @@ async def test_export_taxonomies_async_from_dict(): def test_export_taxonomies_field_headers(): client = PolicyTagManagerSerializationClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanagerserialization.ExportTaxonomiesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -745,7 +763,6 @@ def test_export_taxonomies_field_headers(): type(client.transport.export_taxonomies), "__call__" ) as call: call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse() - client.export_taxonomies(request) # Establish that the underlying gRPC stub method was called. @@ -761,12 +778,13 @@ def test_export_taxonomies_field_headers(): @pytest.mark.asyncio async def test_export_taxonomies_field_headers_async(): client = PolicyTagManagerSerializationAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = policytagmanagerserialization.ExportTaxonomiesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -776,7 +794,6 @@ async def test_export_taxonomies_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanagerserialization.ExportTaxonomiesResponse() ) - await client.export_taxonomies(request) # Establish that the underlying gRPC stub method was called. @@ -792,16 +809,16 @@ async def test_export_taxonomies_field_headers_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = PolicyTagManagerSerializationClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = PolicyTagManagerSerializationClient( @@ -811,7 +828,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = PolicyTagManagerSerializationClient( @@ -822,7 +839,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = PolicyTagManagerSerializationClient(transport=transport) assert client.transport is transport @@ -831,13 +848,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -852,8 +869,8 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -861,7 +878,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = PolicyTagManagerSerializationClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, transports.PolicyTagManagerSerializationGrpcTransport, @@ -870,9 +887,9 @@ def test_transport_grpc_default(): def test_policy_tag_manager_serialization_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.PolicyTagManagerSerializationTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -884,7 +901,7 @@ def test_policy_tag_manager_serialization_base_transport(): ) as Transport: Transport.return_value = None transport = transports.PolicyTagManagerSerializationTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -898,15 +915,37 @@ def test_policy_tag_manager_serialization_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_policy_tag_manager_serialization_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( 
"google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerSerializationTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_policy_tag_manager_serialization_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.PolicyTagManagerSerializationTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -919,19 +958,33 @@ def test_policy_tag_manager_serialization_base_transport_with_credentials_file() def test_policy_tag_manager_serialization_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.PolicyTagManagerSerializationTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_policy_tag_manager_serialization_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PolicyTagManagerSerializationClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_policy_tag_manager_serialization_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) PolicyTagManagerSerializationClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -939,20 +992,169 @@ def test_policy_tag_manager_serialization_auth_adc(): ) -def test_policy_tag_manager_serialization_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_policy_tag_manager_serialization_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.PolicyTagManagerSerializationGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_policy_tag_manager_serialization_transport_auth_adc_old_google_auth( + transport_class, +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerSerializationGrpcTransport, grpc_helpers), + ( + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +@requires_api_core_gte_1_26_0 +def test_policy_tag_manager_serialization_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerSerializationGrpcTransport, grpc_helpers), + ( + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +@requires_api_core_lt_1_26_0 +def test_policy_tag_manager_serialization_transport_create_channel_old_api_core( + transport_class, grpc_helpers 
+): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerSerializationGrpcTransport, grpc_helpers), + ( + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +@requires_api_core_lt_1_26_0 +def test_policy_tag_manager_serialization_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -963,7 +1165,7 @@ def test_policy_tag_manager_serialization_transport_auth_adc(): def test_policy_tag_manager_serialization_grpc_transport_client_cert_source_for_mtls( transport_class, ): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -1002,7 +1204,7 @@ def test_policy_tag_manager_serialization_grpc_transport_client_cert_source_for_ def test_policy_tag_manager_serialization_host_no_port(): client = PolicyTagManagerSerializationClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datacatalog.googleapis.com" ), @@ -1012,7 +1214,7 @@ def test_policy_tag_manager_serialization_host_no_port(): def test_policy_tag_manager_serialization_host_with_port(): client = PolicyTagManagerSerializationClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datacatalog.googleapis.com:8000" ), @@ -1068,9 +1270,9 @@ def test_policy_tag_manager_serialization_transport_channel_mtls_with_client_cer mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -1150,7 +1352,6 @@ def test_taxonomy_path(): project = "squid" location = "clam" taxonomy = "whelk" - expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( project=project, location=location, taxonomy=taxonomy, ) @@ -1175,7 +1376,6 @@ def test_parse_taxonomy_path(): def test_common_billing_account_path(): billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -1198,7 +1398,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = 
"folders/{folder}".format(folder=folder,) actual = PolicyTagManagerSerializationClient.common_folder_path(folder) assert expected == actual @@ -1217,7 +1416,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) actual = PolicyTagManagerSerializationClient.common_organization_path(organization) assert expected == actual @@ -1236,7 +1434,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) actual = PolicyTagManagerSerializationClient.common_project_path(project) assert expected == actual @@ -1256,7 +1453,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "whelk" location = "octopus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -1283,7 +1479,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.PolicyTagManagerSerializationTransport, "_prep_wrapped_messages" ) as prep: client = PolicyTagManagerSerializationClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1292,6 +1488,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = PolicyTagManagerSerializationClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) From 936800c92a348b8ce438c4b608f17fe6d9f926a1 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 18 May 2021 09:29:34 -0700 Subject: [PATCH 17/17] chore: release 3.2.0 (#161) Co-authored-by: 
release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 12 ++++++++++++ setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a6672a63..22f2a339 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-datacatalog/#history +## [3.2.0](https://www.github.com/googleapis/python-datacatalog/compare/v3.1.1...v3.2.0) (2021-05-18) + + +### Features + +* support self-signed JWT flow for service accounts ([85e46e1](https://www.github.com/googleapis/python-datacatalog/commit/85e46e144d32a0d66bc2d7c056453951eb77d592)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([85e46e1](https://www.github.com/googleapis/python-datacatalog/commit/85e46e144d32a0d66bc2d7c056453951eb77d592)) + ### [3.1.1](https://www.github.com/googleapis/python-datacatalog/compare/v3.1.0...v3.1.1) (2021-03-29) diff --git a/setup.py b/setup.py index d0af548c..f5e45b3b 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ name = "google-cloud-datacatalog" description = "Google Cloud Data Catalog API API client library" -version = "3.1.1" +version = "3.2.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'