diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml new file mode 100644 index 0000000000..da616c91a3 --- /dev/null +++ b/.github/.OwlBot.lock.yaml @@ -0,0 +1,3 @@ +docker: + image: gcr.io/repo-automation-bots/owlbot-python:latest + digest: sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml new file mode 100644 index 0000000000..06350d6316 --- /dev/null +++ b/.github/.OwlBot.yaml @@ -0,0 +1,33 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +docker: + image: gcr.io/repo-automation-bots/owlbot-python:latest + +deep-remove-regex: + - /owl-bot-staging + +deep-preserve-regex: + - /owl-bot-staging/firestore/v1beta1 + +deep-copy-regex: + - source: /google/firestore/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/firestore/$1/$2 + - source: /google/firestore/admin/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/firestore_admin/$1/$2 + - source: /google/firestore/bundle/(.*-py)/(.*) + dest: /owl-bot-staging/firestore_bundle/$1/$2 + +begin-after-commit-hash: 107ed1217b5e87048263f52cd3911d5f851aca7e + diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml index fc281c05bd..6fe78aa798 100644 --- a/.github/header-checker-lint.yml +++ b/.github/header-checker-lint.yml @@ -1,6 +1,6 @@ {"allowedCopyrightHolders": ["Google LLC"], "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], "sourceFileExtensions": [ "ts", "js", diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 32388c2581..3baff3ec9c 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") cd github/python-firestore python3 setup.py sdist bdist_wheel -twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 8905fd5e9d..ed5e035d60 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,18 +23,8 @@ env_vars: { value: "github/python-firestore/.kokoro/release.sh" } -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} \ No newline at end of file + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" +} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 32302e4883..4f00c7cffc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,3 +1,17 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: @@ -12,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.0 + rev: 3.9.2 hooks: - id: flake8 diff --git a/.repo-metadata.json b/.repo-metadata.json index 81b0c55d8e..dad92b41a3 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -6,6 +6,7 @@ "issue_tracker": "https://issuetracker.google.com/savedsearches/5337669", "release_level": "ga", "language": "python", + "library_type": "GAPIC_COMBO", "repo": "googleapis/python-firestore", "distribution_name": "google-cloud-firestore", "api_id": "firestore.googleapis.com", diff --git a/CHANGELOG.md b/CHANGELOG.md index 3e397a3d21..b8f606eeca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-firestore/#history +### [2.1.2](https://www.github.com/googleapis/python-firestore/compare/v2.1.1...v2.1.2) (2021-06-14) + + +### Documentation + +* fix broken links in multiprocessing.rst ([#360](https://www.github.com/googleapis/python-firestore/issues/360)) ([6e2c899](https://www.github.com/googleapis/python-firestore/commit/6e2c89989c73ece393c9d23c87f1fc67b500e079)) + ### [2.1.1](https://www.github.com/googleapis/python-firestore/compare/v2.1.0...v2.1.1) (2021-05-03) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index f996e6c473..6d828ce5cf 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -160,21 +160,7 @@ Running System Tests auth settings and change some configuration in your project to run all the tests. -- System tests will be run against an actual project and - so you'll need to provide some environment variables to facilitate - authentication to your project: - - - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; - Such a file can be downloaded directly from the developer's console by clicking - "Generate new JSON key". See private key - `docs `__ - for more details. - -- Once you have downloaded your json keys, set the environment variable - ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: - - $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" - +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. 
************************** Updating Conformance Tests diff --git a/docs/_static/custom.css b/docs/_static/custom.css index bcd37bbd3c..b0a295464b 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,9 +1,20 @@ div#python2-eol { border-color: red; border-width: medium; -} +} /* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/docs/conf.py b/docs/conf.py index 22838f8c0b..e71309de6f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,17 @@ # -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # # google-cloud-firestore documentation build configuration file # @@ -350,6 +363,7 @@ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst index 1cb29d4ca9..536d17b2ea 100644 --- a/docs/multiprocessing.rst +++ b/docs/multiprocessing.rst @@ -1,7 +1,7 @@ .. note:: - Because this client uses :mod:`grpcio` library, it is safe to + Because this client uses :mod:`grpc` library, it is safe to share instances across threads. In multiprocessing scenarios, the best practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.Pool` or + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or :class:`multiprocessing.Process`. 
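The docs/multiprocessing.rst fix above keeps the same guidance while correcting the cross-references: a client owns a gRPC channel, so it is safe to share across threads but should be created only after :func:`os.fork`, i.e. inside each worker process rather than in the parent. A minimal sketch of that pattern (assuming google-cloud-firestore is installed, default credentials are configured, and the collection names are placeholders):

```python
# Create the Firestore client *inside* each worker, i.e. after
# multiprocessing.pool.Pool has forked the process, so the client's
# gRPC channel is never shared across processes.
import multiprocessing

from google.cloud import firestore


def count_documents(collection_name):
    client = firestore.Client()  # constructed after os.fork()
    return sum(1 for _ in client.collection(collection_name).stream())


if __name__ == "__main__":
    with multiprocessing.Pool(processes=2) as pool:
        print(pool.map(count_documents, ["users", "orders"]))
```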
diff --git a/google/cloud/firestore_admin_v1/gapic_metadata.json b/google/cloud/firestore_admin_v1/gapic_metadata.json new file mode 100644 index 0000000000..d48820c0b2 --- /dev/null +++ b/google/cloud/firestore_admin_v1/gapic_metadata.json @@ -0,0 +1,113 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.firestore_admin_v1", + "protoPackage": "google.firestore.admin.v1", + "schema": "1.0", + "services": { + "FirestoreAdmin": { + "clients": { + "grpc": { + "libraryClient": "FirestoreAdminClient", + "rpcs": { + "CreateIndex": { + "methods": [ + "create_index" + ] + }, + "DeleteIndex": { + "methods": [ + "delete_index" + ] + }, + "ExportDocuments": { + "methods": [ + "export_documents" + ] + }, + "GetField": { + "methods": [ + "get_field" + ] + }, + "GetIndex": { + "methods": [ + "get_index" + ] + }, + "ImportDocuments": { + "methods": [ + "import_documents" + ] + }, + "ListFields": { + "methods": [ + "list_fields" + ] + }, + "ListIndexes": { + "methods": [ + "list_indexes" + ] + }, + "UpdateField": { + "methods": [ + "update_field" + ] + } + } + }, + "grpc-async": { + "libraryClient": "FirestoreAdminAsyncClient", + "rpcs": { + "CreateIndex": { + "methods": [ + "create_index" + ] + }, + "DeleteIndex": { + "methods": [ + "delete_index" + ] + }, + "ExportDocuments": { + "methods": [ + "export_documents" + ] + }, + "GetField": { + "methods": [ + "get_field" + ] + }, + "GetIndex": { + "methods": [ + "get_index" + ] + }, + "ImportDocuments": { + "methods": [ + "import_documents" + ] + }, + "ListFields": { + "methods": [ + "list_fields" + ] + }, + "ListIndexes": { + "methods": [ + "list_indexes" + ] + }, + "UpdateField": { + "methods": [ + "update_field" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/firestore_admin_v1/services/__init__.py b/google/cloud/firestore_admin_v1/services/__init__.py index 42ffdf2bc4..4de65971c2 100644 --- a/google/cloud/firestore_admin_v1/services/__init__.py +++ b/google/cloud/firestore_admin_v1/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py b/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py index 7005212e52..13dc946a7f 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import FirestoreAdminClient from .async_client import FirestoreAdminAsyncClient diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py index ad6f760b84..de8414e68b 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,13 +20,13 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation as ga_operation # type: ignore +from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.firestore_admin_v1.services.firestore_admin import pagers from google.cloud.firestore_admin_v1.types import field @@ -37,8 +35,7 @@ from google.cloud.firestore_admin_v1.types import index from google.cloud.firestore_admin_v1.types import index as gfa_index from google.cloud.firestore_admin_v1.types import operation as gfa_operation -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport from .client import FirestoreAdminClient @@ -64,31 +61,26 @@ class FirestoreAdminAsyncClient: parse_field_path = staticmethod(FirestoreAdminClient.parse_field_path) index_path = staticmethod(FirestoreAdminClient.index_path) parse_index_path = staticmethod(FirestoreAdminClient.parse_index_path) - common_billing_account_path = staticmethod( FirestoreAdminClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( FirestoreAdminClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(FirestoreAdminClient.common_folder_path) parse_common_folder_path = staticmethod( FirestoreAdminClient.parse_common_folder_path ) - common_organization_path = staticmethod( FirestoreAdminClient.common_organization_path ) parse_common_organization_path = staticmethod( FirestoreAdminClient.parse_common_organization_path ) - common_project_path = staticmethod(FirestoreAdminClient.common_project_path) parse_common_project_path = staticmethod( FirestoreAdminClient.parse_common_project_path ) - common_location_path = staticmethod(FirestoreAdminClient.common_location_path) parse_common_location_path = staticmethod( FirestoreAdminClient.parse_common_location_path @@ -96,7 +88,8 @@ class FirestoreAdminAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -111,7 +104,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -128,7 +121,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> FirestoreAdminTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. 
Returns: FirestoreAdminTransport: The transport used by the client instance. @@ -142,12 +135,12 @@ def transport(self) -> FirestoreAdminTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, FirestoreAdminTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the firestore admin client. + """Instantiates the firestore admin client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -179,7 +172,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = FirestoreAdminClient( credentials=credentials, transport=transport, @@ -221,7 +213,6 @@ async def create_index( This corresponds to the ``index`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -250,7 +241,6 @@ async def create_index( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if index is not None: @@ -306,7 +296,6 @@ async def list_indexes( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -336,7 +325,6 @@ async def list_indexes( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -349,9 +337,9 @@ async def list_indexes( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -399,7 +387,6 @@ async def get_index( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -427,7 +414,6 @@ async def get_index( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -440,9 +426,9 @@ async def get_index( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -484,7 +470,6 @@ async def delete_index( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -505,7 +490,6 @@ async def delete_index( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name @@ -518,9 +502,9 @@ async def delete_index( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -561,7 +545,6 @@ async def get_field( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -591,7 +574,6 @@ async def get_field( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -604,9 +586,9 @@ async def get_field( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -661,7 +643,6 @@ async def update_field( This corresponds to the ``field`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -695,7 +676,6 @@ async def update_field( # If we have keyword arguments corresponding to fields on the # request, apply these. - if field is not None: request.field = field @@ -758,7 +738,6 @@ async def list_fields( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -788,7 +767,6 @@ async def list_fields( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -801,9 +779,9 @@ async def list_fields( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -860,7 +838,6 @@ async def export_documents( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -892,7 +869,6 @@ async def export_documents( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -952,7 +928,6 @@ async def import_documents( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -992,7 +967,6 @@ async def import_documents( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name @@ -1017,7 +991,7 @@ async def import_documents( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=gfa_operation.ImportDocumentsMetadata, ) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py index dd8cf373d1..490b9465ea 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,16 +21,16 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation as ga_operation # type: ignore +from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.firestore_admin_v1.services.firestore_admin import pagers from google.cloud.firestore_admin_v1.types import field @@ -41,8 +39,7 @@ from google.cloud.firestore_admin_v1.types import index from google.cloud.firestore_admin_v1.types import index as gfa_index from google.cloud.firestore_admin_v1.types import operation as gfa_operation -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc import FirestoreAdminGrpcTransport from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport @@ -63,7 +60,7 @@ class FirestoreAdminClientMeta(type): _transport_registry["grpc_asyncio"] = FirestoreAdminGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[FirestoreAdminTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -88,7 +85,8 @@ class FirestoreAdminClient(metaclass=FirestoreAdminClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -122,7 +120,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. 
+ """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -139,7 +138,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -158,23 +157,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> FirestoreAdminTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - FirestoreAdminTransport: The transport used by the client instance. + FirestoreAdminTransport: The transport used by the client + instance. """ return self._transport @staticmethod def collection_group_path(project: str, database: str, collection: str,) -> str: - """Return a fully-qualified collection_group string.""" + """Returns a fully-qualified collection_group string.""" return "projects/{project}/databases/{database}/collectionGroups/{collection}".format( project=project, database=database, collection=collection, ) @staticmethod def parse_collection_group_path(path: str) -> Dict[str, str]: - """Parse a collection_group path into its component segments.""" + """Parses a collection_group path into its component segments.""" m = re.match( r"^projects/(?P.+?)/databases/(?P.+?)/collectionGroups/(?P.+?)$", path, @@ -183,27 +183,27 @@ def parse_collection_group_path(path: str) -> Dict[str, str]: @staticmethod def database_path(project: str, database: str,) -> str: - """Return a fully-qualified database string.""" + """Returns a fully-qualified database string.""" return "projects/{project}/databases/{database}".format( project=project, database=database, ) @staticmethod def parse_database_path(path: str) -> Dict[str, str]: - """Parse a database path into its component segments.""" + """Parses a database path into its component segments.""" m = re.match(r"^projects/(?P.+?)/databases/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def field_path(project: str, database: str, collection: str, field: str,) -> str: - """Return a fully-qualified field string.""" + """Returns a fully-qualified field string.""" return "projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}".format( project=project, database=database, collection=collection, field=field, ) @staticmethod def parse_field_path(path: str) -> Dict[str, str]: - """Parse a field path into its component segments.""" + """Parses a field path into its component segments.""" m = re.match( r"^projects/(?P.+?)/databases/(?P.+?)/collectionGroups/(?P.+?)/fields/(?P.+?)$", path, @@ -212,14 +212,14 @@ def parse_field_path(path: str) -> Dict[str, str]: @staticmethod def index_path(project: str, database: str, collection: str, index: str,) -> str: - """Return a fully-qualified index string.""" + """Returns a fully-qualified index string.""" return "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}".format( project=project, database=database, collection=collection, index=index, ) @staticmethod def parse_index_path(path: str) -> Dict[str, str]: - """Parse a index path into its component segments.""" + """Parses a index path into its component segments.""" m = re.match( r"^projects/(?P.+?)/databases/(?P.+?)/collectionGroups/(?P.+?)/indexes/(?P.+?)$", path, @@ 
-228,7 +228,7 @@ def parse_index_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -241,7 +241,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -252,7 +252,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -263,7 +263,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -274,7 +274,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -288,12 +288,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, FirestoreAdminTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the firestore admin client. + """Instantiates the firestore admin client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -348,9 +348,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -362,12 +363,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -382,8 +385,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." 
) self._transport = transport else: @@ -407,7 +410,7 @@ def create_index( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> ga_operation.Operation: + ) -> gac_operation.Operation: r"""Creates a composite index. This returns a [google.longrunning.Operation][google.longrunning.Operation] which may be used to track the status of the creation. The @@ -432,7 +435,6 @@ def create_index( This corresponds to the ``index`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -463,10 +465,8 @@ def create_index( # there are no flattened fields. if not isinstance(request, firestore_admin.CreateIndexRequest): request = firestore_admin.CreateIndexRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if index is not None: @@ -486,7 +486,7 @@ def create_index( response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. - response = ga_operation.from_gapic( + response = gac_operation.from_gapic( response, self._transport.operations_client, gfa_index.Index, @@ -518,7 +518,6 @@ def list_indexes( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -550,10 +549,8 @@ def list_indexes( # there are no flattened fields. if not isinstance(request, firestore_admin.ListIndexesRequest): request = firestore_admin.ListIndexesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -601,7 +598,6 @@ def get_index( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -631,10 +627,8 @@ def get_index( # there are no flattened fields. if not isinstance(request, firestore_admin.GetIndexRequest): request = firestore_admin.GetIndexRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -676,7 +670,6 @@ def delete_index( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -699,10 +692,8 @@ def delete_index( # there are no flattened fields. if not isinstance(request, firestore_admin.DeleteIndexRequest): request = firestore_admin.DeleteIndexRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -743,7 +734,6 @@ def get_field( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -775,10 +765,8 @@ def get_field( # there are no flattened fields. if not isinstance(request, firestore_admin.GetFieldRequest): request = firestore_admin.GetFieldRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -806,7 +794,7 @@ def update_field( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> ga_operation.Operation: + ) -> gac_operation.Operation: r"""Updates a field configuration. Currently, field updates apply only to single field index configuration. However, calls to [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] @@ -833,7 +821,6 @@ def update_field( This corresponds to the ``field`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -869,10 +856,8 @@ def update_field( # there are no flattened fields. if not isinstance(request, firestore_admin.UpdateFieldRequest): request = firestore_admin.UpdateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if field is not None: request.field = field @@ -892,7 +877,7 @@ def update_field( response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. - response = ga_operation.from_gapic( + response = gac_operation.from_gapic( response, self._transport.operations_client, gfa_field.Field, @@ -931,7 +916,6 @@ def list_fields( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -963,10 +947,8 @@ def list_fields( # there are no flattened fields. if not isinstance(request, firestore_admin.ListFieldsRequest): request = firestore_admin.ListFieldsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1000,7 +982,7 @@ def export_documents( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> ga_operation.Operation: + ) -> gac_operation.Operation: r"""Exports a copy of all or a subset of documents from Google Cloud Firestore to another storage system, such as Google Cloud Storage. Recent updates to documents may @@ -1023,7 +1005,6 @@ def export_documents( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1057,10 +1038,8 @@ def export_documents( # there are no flattened fields. if not isinstance(request, firestore_admin.ExportDocumentsRequest): request = firestore_admin.ExportDocumentsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1078,7 +1057,7 @@ def export_documents( response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. 
- response = ga_operation.from_gapic( + response = gac_operation.from_gapic( response, self._transport.operations_client, gfa_operation.ExportDocumentsResponse, @@ -1096,7 +1075,7 @@ def import_documents( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> ga_operation.Operation: + ) -> gac_operation.Operation: r"""Imports documents into Google Cloud Firestore. Existing documents with the same name are overwritten. The import occurs in the background and its progress can @@ -1116,7 +1095,6 @@ def import_documents( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1158,10 +1136,8 @@ def import_documents( # there are no flattened fields. if not isinstance(request, firestore_admin.ImportDocumentsRequest): request = firestore_admin.ImportDocumentsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1179,10 +1155,10 @@ def import_documents( response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. - response = ga_operation.from_gapic( + response = gac_operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=gfa_operation.ImportDocumentsMetadata, ) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py index 0b51a2c851..fbb7d0dc14 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -119,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -247,7 +245,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py index 7ddd11ebd5..d98e246bc6 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py index f81e653de7..4a7d6c0b51 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,24 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.firestore_admin_v1.types import field from google.cloud.firestore_admin_v1.types import firestore_admin from google.cloud.firestore_admin_v1.types import index -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -42,6 +41,17 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class FirestoreAdminTransport(abc.ABC): """Abstract transport class for FirestoreAdmin.""" @@ -51,21 +61,24 @@ class FirestoreAdminTransport(abc.ABC): "https://www.googleapis.com/auth/datastore", ) + DEFAULT_HOST: str = "firestore.googleapis.com" + def __init__( self, *, - host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -74,7 +87,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -88,29 +101,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -124,9 +184,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -140,9 +200,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -156,9 +216,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -172,9 +232,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -191,9 +251,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -216,20 +276,20 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def create_index( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore_admin.CreateIndexRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def list_indexes( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore_admin.ListIndexesRequest], - typing.Union[ + Union[ firestore_admin.ListIndexesResponse, - typing.Awaitable[firestore_admin.ListIndexesResponse], + Awaitable[firestore_admin.ListIndexesResponse], ], ]: raise NotImplementedError() @@ -237,47 +297,45 @@ def list_indexes( @property def get_index( self, - ) -> typing.Callable[ - [firestore_admin.GetIndexRequest], - typing.Union[index.Index, typing.Awaitable[index.Index]], + ) -> Callable[ + [firestore_admin.GetIndexRequest], Union[index.Index, Awaitable[index.Index]] ]: raise NotImplementedError() @property def delete_index( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore_admin.DeleteIndexRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def get_field( self, - ) -> typing.Callable[ - [firestore_admin.GetFieldRequest], - typing.Union[field.Field, typing.Awaitable[field.Field]], + ) -> Callable[ + [firestore_admin.GetFieldRequest], Union[field.Field, Awaitable[field.Field]] ]: raise NotImplementedError() @property def update_field( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore_admin.UpdateFieldRequest], - 
typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def list_fields( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore_admin.ListFieldsRequest], - typing.Union[ + Union[ firestore_admin.ListFieldsResponse, - typing.Awaitable[firestore_admin.ListFieldsResponse], + Awaitable[firestore_admin.ListFieldsResponse], ], ]: raise NotImplementedError() @@ -285,18 +343,18 @@ def list_fields( @property def export_documents( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore_admin.ExportDocumentsRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def import_documents( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore_admin.ImportDocumentsRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py index b3472f2576..630cbef3eb 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,15 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore @@ -30,9 +28,8 @@ from google.cloud.firestore_admin_v1.types import field from google.cloud.firestore_admin_v1.types import firestore_admin from google.cloud.firestore_admin_v1.types import index -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO @@ -56,7 +53,7 @@ def __init__( self, *, host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -70,7 +67,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -181,7 +179,7 @@ def __init__( def create_channel( cls, host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -212,13 +210,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -245,7 +245,7 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def create_index( self, - ) -> Callable[[firestore_admin.CreateIndexRequest], operations.Operation]: + ) -> Callable[[firestore_admin.CreateIndexRequest], operations_pb2.Operation]: r"""Return a callable for the create index method over gRPC. Creates a composite index. This returns a @@ -268,7 +268,7 @@ def create_index( self._stubs["create_index"] = self.grpc_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex", request_serializer=firestore_admin.CreateIndexRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_index"] @@ -327,7 +327,7 @@ def get_index(self) -> Callable[[firestore_admin.GetIndexRequest], index.Index]: @property def delete_index( self, - ) -> Callable[[firestore_admin.DeleteIndexRequest], empty.Empty]: + ) -> Callable[[firestore_admin.DeleteIndexRequest], empty_pb2.Empty]: r"""Return a callable for the delete index method over gRPC. Deletes a composite index. @@ -346,7 +346,7 @@ def delete_index( self._stubs["delete_index"] = self.grpc_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex", request_serializer=firestore_admin.DeleteIndexRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_index"] @@ -377,7 +377,7 @@ def get_field(self) -> Callable[[firestore_admin.GetFieldRequest], field.Field]: @property def update_field( self, - ) -> Callable[[firestore_admin.UpdateFieldRequest], operations.Operation]: + ) -> Callable[[firestore_admin.UpdateFieldRequest], operations_pb2.Operation]: r"""Return a callable for the update field method over gRPC. Updates a field configuration. Currently, field updates apply @@ -411,7 +411,7 @@ def update_field( self._stubs["update_field"] = self.grpc_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/UpdateField", request_serializer=firestore_admin.UpdateFieldRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_field"] @@ -453,7 +453,7 @@ def list_fields( @property def export_documents( self, - ) -> Callable[[firestore_admin.ExportDocumentsRequest], operations.Operation]: + ) -> Callable[[firestore_admin.ExportDocumentsRequest], operations_pb2.Operation]: r"""Return a callable for the export documents method over gRPC. 
Exports a copy of all or a subset of documents from @@ -481,14 +481,14 @@ def export_documents( self._stubs["export_documents"] = self.grpc_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments", request_serializer=firestore_admin.ExportDocumentsRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["export_documents"] @property def import_documents( self, - ) -> Callable[[firestore_admin.ImportDocumentsRequest], operations.Operation]: + ) -> Callable[[firestore_admin.ImportDocumentsRequest], operations_pb2.Operation]: r"""Return a callable for the import documents method over gRPC. Imports documents into Google Cloud Firestore. @@ -513,7 +513,7 @@ def import_documents( self._stubs["import_documents"] = self.grpc_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments", request_serializer=firestore_admin.ImportDocumentsRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["import_documents"] diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py index 927c5dc9a9..f8779a4a24 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,16 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -31,9 +29,8 @@ from google.cloud.firestore_admin_v1.types import field from google.cloud.firestore_admin_v1.types import firestore_admin from google.cloud.firestore_admin_v1.types import index -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO from .grpc import FirestoreAdminGrpcTransport @@ -59,7 +56,7 @@ class FirestoreAdminGrpcAsyncIOTransport(FirestoreAdminTransport): def create_channel( cls, host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -86,13 +83,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
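These create_channel hunks (the synchronous one above and the asyncio one just below) drop the explicit scopes argument in favour of keyword arguments computed by cls._get_self_signed_jwt_kwargs, so scope handling and the self-signed JWT audience come from one place. A minimal sketch of opening a channel through the updated classmethod, assuming a service-account key file whose path ("service-account.json") is only a placeholder:

    # Sketch only: build a channel explicitly and hand it to the transport.
    from google.oauth2 import service_account
    from google.cloud.firestore_admin_v1.services.firestore_admin.transports.grpc import (
        FirestoreAdminGrpcTransport,
    )

    creds = service_account.Credentials.from_service_account_file("service-account.json")
    channel = FirestoreAdminGrpcTransport.create_channel(
        "firestore.googleapis.com",
        credentials=creds,
    )
    transport = FirestoreAdminGrpcTransport(channel=channel)
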
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -100,7 +99,7 @@ def __init__( self, *, host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -114,7 +113,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -173,7 +173,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -252,7 +251,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: def create_index( self, ) -> Callable[ - [firestore_admin.CreateIndexRequest], Awaitable[operations.Operation] + [firestore_admin.CreateIndexRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the create index method over gRPC. @@ -276,7 +275,7 @@ def create_index( self._stubs["create_index"] = self.grpc_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex", request_serializer=firestore_admin.CreateIndexRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_index"] @@ -338,7 +337,7 @@ def get_index( @property def delete_index( self, - ) -> Callable[[firestore_admin.DeleteIndexRequest], Awaitable[empty.Empty]]: + ) -> Callable[[firestore_admin.DeleteIndexRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete index method over gRPC. Deletes a composite index. @@ -357,7 +356,7 @@ def delete_index( self._stubs["delete_index"] = self.grpc_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex", request_serializer=firestore_admin.DeleteIndexRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_index"] @@ -391,7 +390,7 @@ def get_field( def update_field( self, ) -> Callable[ - [firestore_admin.UpdateFieldRequest], Awaitable[operations.Operation] + [firestore_admin.UpdateFieldRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the update field method over gRPC. @@ -426,7 +425,7 @@ def update_field( self._stubs["update_field"] = self.grpc_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/UpdateField", request_serializer=firestore_admin.UpdateFieldRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_field"] @@ -470,7 +469,7 @@ def list_fields( def export_documents( self, ) -> Callable[ - [firestore_admin.ExportDocumentsRequest], Awaitable[operations.Operation] + [firestore_admin.ExportDocumentsRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the export documents method over gRPC. 
@@ -499,7 +498,7 @@ def export_documents( self._stubs["export_documents"] = self.grpc_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments", request_serializer=firestore_admin.ExportDocumentsRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["export_documents"] @@ -507,7 +506,7 @@ def export_documents( def import_documents( self, ) -> Callable[ - [firestore_admin.ImportDocumentsRequest], Awaitable[operations.Operation] + [firestore_admin.ImportDocumentsRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the import documents method over gRPC. @@ -533,7 +532,7 @@ def import_documents( self._stubs["import_documents"] = self.grpc_channel.unary_unary( "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments", request_serializer=firestore_admin.ImportDocumentsRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["import_documents"] diff --git a/google/cloud/firestore_admin_v1/types/__init__.py b/google/cloud/firestore_admin_v1/types/__init__.py index f6838c6248..9cd047fc7a 100644 --- a/google/cloud/firestore_admin_v1/types/__init__.py +++ b/google/cloud/firestore_admin_v1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .field import Field from .firestore_admin import ( CreateIndexRequest, diff --git a/google/cloud/firestore_admin_v1/types/field.py b/google/cloud/firestore_admin_v1/types/field.py index 00f1fa29bc..5c28cc2f6d 100644 --- a/google/cloud/firestore_admin_v1/types/field.py +++ b/google/cloud/firestore_admin_v1/types/field.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,10 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.firestore_admin_v1.types import index @@ -65,7 +62,6 @@ class Field(proto.Message): class IndexConfig(proto.Message): r"""The index configuration for this field. - Attributes: indexes (Sequence[google.cloud.firestore_admin_v1.types.Index]): The indexes supported for this field. 
@@ -90,15 +86,11 @@ class IndexConfig(proto.Message): """ indexes = proto.RepeatedField(proto.MESSAGE, number=1, message=index.Index,) + uses_ancestor_config = proto.Field(proto.BOOL, number=2,) + ancestor_field = proto.Field(proto.STRING, number=3,) + reverting = proto.Field(proto.BOOL, number=4,) - uses_ancestor_config = proto.Field(proto.BOOL, number=2) - - ancestor_field = proto.Field(proto.STRING, number=3) - - reverting = proto.Field(proto.BOOL, number=4) - - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) index_config = proto.Field(proto.MESSAGE, number=2, message=IndexConfig,) diff --git a/google/cloud/firestore_admin_v1/types/firestore_admin.py b/google/cloud/firestore_admin_v1/types/firestore_admin.py index d3eae822ca..27c0ed1677 100644 --- a/google/cloud/firestore_admin_v1/types/firestore_admin.py +++ b/google/cloud/firestore_admin_v1/types/firestore_admin.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.firestore_admin_v1.types import field as gfa_field from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore __protobuf__ = proto.module( @@ -53,8 +50,7 @@ class CreateIndexRequest(proto.Message): Required. The composite index to create. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) index = proto.Field(proto.MESSAGE, number=2, message=gfa_index.Index,) @@ -76,13 +72,10 @@ class ListIndexesRequest(proto.Message): that may be used to get the next page of results. 
""" - parent = proto.Field(proto.STRING, number=1) - - filter = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) - - page_token = proto.Field(proto.STRING, number=4) + parent = proto.Field(proto.STRING, number=1,) + filter = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) + page_token = proto.Field(proto.STRING, number=4,) class ListIndexesResponse(proto.Message): @@ -103,8 +96,7 @@ def raw_page(self): return self indexes = proto.RepeatedField(proto.MESSAGE, number=1, message=gfa_index.Index,) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class GetIndexRequest(proto.Message): @@ -117,7 +109,7 @@ class GetIndexRequest(proto.Message): ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class DeleteIndexRequest(proto.Message): @@ -130,7 +122,7 @@ class DeleteIndexRequest(proto.Message): ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpdateFieldRequest(proto.Message): @@ -147,8 +139,9 @@ class UpdateFieldRequest(proto.Message): """ field = proto.Field(proto.MESSAGE, number=1, message=gfa_field.Field,) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class GetFieldRequest(proto.Message): @@ -161,7 +154,7 @@ class GetFieldRequest(proto.Message): ``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListFieldsRequest(proto.Message): @@ -188,13 +181,10 @@ class ListFieldsRequest(proto.Message): that may be used to get the next page of results. """ - parent = proto.Field(proto.STRING, number=1) - - filter = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) - - page_token = proto.Field(proto.STRING, number=4) + parent = proto.Field(proto.STRING, number=1,) + filter = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) + page_token = proto.Field(proto.STRING, number=4,) class ListFieldsResponse(proto.Message): @@ -215,8 +205,7 @@ def raw_page(self): return self fields = proto.RepeatedField(proto.MESSAGE, number=1, message=gfa_field.Field,) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class ExportDocumentsRequest(proto.Message): @@ -242,11 +231,9 @@ class ExportDocumentsRequest(proto.Message): generated based on the start time. """ - name = proto.Field(proto.STRING, number=1) - - collection_ids = proto.RepeatedField(proto.STRING, number=2) - - output_uri_prefix = proto.Field(proto.STRING, number=3) + name = proto.Field(proto.STRING, number=1,) + collection_ids = proto.RepeatedField(proto.STRING, number=2,) + output_uri_prefix = proto.Field(proto.STRING, number=3,) class ImportDocumentsRequest(proto.Message): @@ -267,11 +254,9 @@ class ImportDocumentsRequest(proto.Message): [google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix]. 
""" - name = proto.Field(proto.STRING, number=1) - - collection_ids = proto.RepeatedField(proto.STRING, number=2) - - input_uri_prefix = proto.Field(proto.STRING, number=3) + name = proto.Field(proto.STRING, number=1,) + collection_ids = proto.RepeatedField(proto.STRING, number=2,) + input_uri_prefix = proto.Field(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_admin_v1/types/index.py b/google/cloud/firestore_admin_v1/types/index.py index cbac4cf9dd..9d55ebe91a 100644 --- a/google/cloud/firestore_admin_v1/types/index.py +++ b/google/cloud/firestore_admin_v1/types/index.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore @@ -109,12 +107,10 @@ class ArrayConfig(proto.Enum): ARRAY_CONFIG_UNSPECIFIED = 0 CONTAINS = 1 - field_path = proto.Field(proto.STRING, number=1) - + field_path = proto.Field(proto.STRING, number=1,) order = proto.Field( proto.ENUM, number=2, oneof="value_mode", enum="Index.IndexField.Order", ) - array_config = proto.Field( proto.ENUM, number=3, @@ -122,12 +118,9 @@ class ArrayConfig(proto.Enum): enum="Index.IndexField.ArrayConfig", ) - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) query_scope = proto.Field(proto.ENUM, number=2, enum=QueryScope,) - fields = proto.RepeatedField(proto.MESSAGE, number=3, message=IndexField,) - state = proto.Field(proto.ENUM, number=4, enum=State,) diff --git a/google/cloud/firestore_admin_v1/types/location.py b/google/cloud/firestore_admin_v1/types/location.py index 5259f44be9..c4442e0f5b 100644 --- a/google/cloud/firestore_admin_v1/types/location.py +++ b/google/cloud/firestore_admin_v1/types/location.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore @@ -26,7 +24,7 @@ class LocationMetadata(proto.Message): r"""The metadata message for [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata]. - """ + """ __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_admin_v1/types/operation.py b/google/cloud/firestore_admin_v1/types/operation.py index 628b27ccb4..33b9a82da9 100644 --- a/google/cloud/firestore_admin_v1/types/operation.py +++ b/google/cloud/firestore_admin_v1/types/operation.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -73,16 +70,11 @@ class IndexOperationMetadata(proto.Message): The progress, in bytes, of this operation. 
""" - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - index = proto.Field(proto.STRING, number=3) - + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + index = proto.Field(proto.STRING, number=3,) state = proto.Field(proto.ENUM, number=4, enum="OperationState",) - progress_documents = proto.Field(proto.MESSAGE, number=5, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=6, message="Progress",) @@ -117,7 +109,6 @@ class FieldOperationMetadata(proto.Message): class IndexConfigDelta(proto.Message): r"""Information about an index configuration change. - Attributes: change_type (google.cloud.firestore_admin_v1.types.FieldOperationMetadata.IndexConfigDelta.ChangeType): Specifies how the index is changing. @@ -136,23 +127,16 @@ class ChangeType(proto.Enum): number=1, enum="FieldOperationMetadata.IndexConfigDelta.ChangeType", ) - index = proto.Field(proto.MESSAGE, number=2, message=gfa_index.Index,) - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - field = proto.Field(proto.STRING, number=3) - + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + field = proto.Field(proto.STRING, number=3,) index_config_deltas = proto.RepeatedField( proto.MESSAGE, number=4, message=IndexConfigDelta, ) - state = proto.Field(proto.ENUM, number=5, enum="OperationState",) - progress_documents = proto.Field(proto.MESSAGE, number=6, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=7, message="Progress",) @@ -181,19 +165,13 @@ class ExportDocumentsMetadata(proto.Message): Where the entities are being exported to. """ - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) operation_state = proto.Field(proto.ENUM, number=3, enum="OperationState",) - progress_documents = proto.Field(proto.MESSAGE, number=4, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=5, message="Progress",) - - collection_ids = proto.RepeatedField(proto.STRING, number=6) - - output_uri_prefix = proto.Field(proto.STRING, number=7) + collection_ids = proto.RepeatedField(proto.STRING, number=6,) + output_uri_prefix = proto.Field(proto.STRING, number=7,) class ImportDocumentsMetadata(proto.Message): @@ -221,19 +199,13 @@ class ImportDocumentsMetadata(proto.Message): The location of the documents being imported. 
""" - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) operation_state = proto.Field(proto.ENUM, number=3, enum="OperationState",) - progress_documents = proto.Field(proto.MESSAGE, number=4, message="Progress",) - progress_bytes = proto.Field(proto.MESSAGE, number=5, message="Progress",) - - collection_ids = proto.RepeatedField(proto.STRING, number=6) - - input_uri_prefix = proto.Field(proto.STRING, number=7) + collection_ids = proto.RepeatedField(proto.STRING, number=6,) + input_uri_prefix = proto.Field(proto.STRING, number=7,) class ExportDocumentsResponse(proto.Message): @@ -249,7 +221,7 @@ class ExportDocumentsResponse(proto.Message): operation completes successfully. """ - output_uri_prefix = proto.Field(proto.STRING, number=1) + output_uri_prefix = proto.Field(proto.STRING, number=1,) class Progress(proto.Message): @@ -264,9 +236,8 @@ class Progress(proto.Message): The amount of work completed. """ - estimated_work = proto.Field(proto.INT64, number=1) - - completed_work = proto.Field(proto.INT64, number=2) + estimated_work = proto.Field(proto.INT64, number=1,) + completed_work = proto.Field(proto.INT64, number=2,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_bundle/__init__.py b/google/cloud/firestore_bundle/__init__.py index d1ffaeff58..8d6b30e32d 100644 --- a/google/cloud/firestore_bundle/__init__.py +++ b/google/cloud/firestore_bundle/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,20 +14,20 @@ # limitations under the License. 
# -from .types.bundle import BundleElement -from .types.bundle import BundleMetadata + from .types.bundle import BundledDocumentMetadata from .types.bundle import BundledQuery +from .types.bundle import BundleElement +from .types.bundle import BundleMetadata from .types.bundle import NamedQuery from .bundle import FirestoreBundle - __all__ = ( "BundleElement", "BundleMetadata", "BundledDocumentMetadata", - "NamedQuery", "BundledQuery", "FirestoreBundle", + "NamedQuery", ) diff --git a/google/cloud/firestore_bundle/gapic_metadata.json b/google/cloud/firestore_bundle/gapic_metadata.json new file mode 100644 index 0000000000..e81fe51253 --- /dev/null +++ b/google/cloud/firestore_bundle/gapic_metadata.json @@ -0,0 +1,7 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.bundle", + "protoPackage": "google.firestore.bundle", + "schema": "1.0" +} diff --git a/google/cloud/firestore_bundle/services/__init__.py b/google/cloud/firestore_bundle/services/__init__.py index 42ffdf2bc4..4de65971c2 100644 --- a/google/cloud/firestore_bundle/services/__init__.py +++ b/google/cloud/firestore_bundle/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/firestore_bundle/types/__init__.py b/google/cloud/firestore_bundle/types/__init__.py index 737862b173..7020b654d3 100644 --- a/google/cloud/firestore_bundle/types/__init__.py +++ b/google/cloud/firestore_bundle/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .bundle import ( BundledDocumentMetadata, BundledQuery, diff --git a/google/cloud/firestore_bundle/types/bundle.py b/google/cloud/firestore_bundle/types/bundle.py index 3d78bfe00f..192c260965 100644 --- a/google/cloud/firestore_bundle/types/bundle.py +++ b/google/cloud/firestore_bundle/types/bundle.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.cloud.firestore_v1.types import document as gfv_document -from google.cloud.firestore_v1.types import query -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.cloud.firestore_v1.types import document as document_pb2 # type: ignore +from google.cloud.firestore_v1.types import query as query_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -37,7 +34,6 @@ class BundledQuery(proto.Message): r"""Encodes a query saved in the bundle. - Attributes: parent (str): The parent resource name. 
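The gapic_metadata.json files added in this change (a small one for the bundle package above, a much larger one for firestore_v1 below) are static descriptions mapping proto services and RPCs to generated client methods; they are intended for tooling rather than runtime use. A sketch of inspecting one with the standard library:

    # Sketch only: reading a generated gapic_metadata.json file.
    import json

    with open("google/cloud/firestore_bundle/gapic_metadata.json") as fp:
        metadata = json.load(fp)

    print(metadata["libraryPackage"], metadata["protoPackage"])
    # The bundle file has no services; firestore_v1's lists every RPC mapping.
    for service, info in metadata.get("services", {}).items():
        print(service, sorted(info["clients"]))
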
@@ -54,12 +50,10 @@ class LimitType(proto.Enum): FIRST = 0 LAST = 1 - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) structured_query = proto.Field( - proto.MESSAGE, number=2, oneof="query_type", message=query.StructuredQuery, + proto.MESSAGE, number=2, oneof="query_type", message=query_pb2.StructuredQuery, ) - limit_type = proto.Field(proto.ENUM, number=3, enum=LimitType,) @@ -83,16 +77,13 @@ class NamedQuery(proto.Message): client SDKs. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) bundled_query = proto.Field(proto.MESSAGE, number=2, message="BundledQuery",) - - read_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + read_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) class BundledDocumentMetadata(proto.Message): r"""Metadata describing a Firestore document saved in the bundle. - Attributes: name (str): The document key of a bundled document. @@ -106,18 +97,14 @@ class BundledDocumentMetadata(proto.Message): this document matches to. """ - name = proto.Field(proto.STRING, number=1) - - read_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - exists = proto.Field(proto.BOOL, number=3) - - queries = proto.RepeatedField(proto.STRING, number=4) + name = proto.Field(proto.STRING, number=1,) + read_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + exists = proto.Field(proto.BOOL, number=3,) + queries = proto.RepeatedField(proto.STRING, number=4,) class BundleMetadata(proto.Message): r"""Metadata describing the bundle file/stream. - Attributes: id (str): The ID of the bundle. @@ -133,15 +120,11 @@ class BundleMetadata(proto.Message): ``BundleMetadata``. 
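The bundle messages likewise keep their fields under the new single-line formatting and remain ordinary proto-plus types. A short sketch with arbitrary values:

    # Sketch only: constructing bundle messages from the types shown here.
    from google.cloud.firestore_bundle import BundleMetadata, NamedQuery

    metadata = BundleMetadata(id="example-bundle", version=1, total_documents=2)
    named = NamedQuery(name="recent-users")
    print(metadata.id, named.name)
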
""" - id = proto.Field(proto.STRING, number=1) - - create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - version = proto.Field(proto.UINT32, number=3) - - total_documents = proto.Field(proto.UINT32, number=4) - - total_bytes = proto.Field(proto.UINT64, number=5) + id = proto.Field(proto.STRING, number=1,) + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + version = proto.Field(proto.UINT32, number=3,) + total_documents = proto.Field(proto.UINT32, number=4,) + total_bytes = proto.Field(proto.UINT64, number=5,) class BundleElement(proto.Message): @@ -165,20 +148,17 @@ class BundleElement(proto.Message): metadata = proto.Field( proto.MESSAGE, number=1, oneof="element_type", message="BundleMetadata", ) - named_query = proto.Field( proto.MESSAGE, number=2, oneof="element_type", message="NamedQuery", ) - document_metadata = proto.Field( proto.MESSAGE, number=3, oneof="element_type", message="BundledDocumentMetadata", ) - document = proto.Field( - proto.MESSAGE, number=4, oneof="element_type", message=gfv_document.Document, + proto.MESSAGE, number=4, oneof="element_type", message=document_pb2.Document, ) diff --git a/google/cloud/firestore_v1/gapic_metadata.json b/google/cloud/firestore_v1/gapic_metadata.json new file mode 100644 index 0000000000..a7bfee2f6d --- /dev/null +++ b/google/cloud/firestore_v1/gapic_metadata.json @@ -0,0 +1,173 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.firestore_v1", + "protoPackage": "google.firestore.v1", + "schema": "1.0", + "services": { + "Firestore": { + "clients": { + "grpc": { + "libraryClient": "FirestoreClient", + "rpcs": { + "BatchGetDocuments": { + "methods": [ + "batch_get_documents" + ] + }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ListCollectionIds": { + "methods": [ + "list_collection_ids" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "Listen": { + "methods": [ + "listen" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "RunQuery": { + "methods": [ + "run_query" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + }, + "Write": { + "methods": [ + "write" + ] + } + } + }, + "grpc-async": { + "libraryClient": "FirestoreAsyncClient", + "rpcs": { + "BatchGetDocuments": { + "methods": [ + "batch_get_documents" + ] + }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ListCollectionIds": { + "methods": [ + "list_collection_ids" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "Listen": { + "methods": [ + "listen" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "RunQuery": { + "methods": [ + "run_query" 
+ ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + }, + "Write": { + "methods": [ + "write" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/firestore_v1/services/__init__.py b/google/cloud/firestore_v1/services/__init__.py index 42ffdf2bc4..4de65971c2 100644 --- a/google/cloud/firestore_v1/services/__init__.py +++ b/google/cloud/firestore_v1/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/firestore_v1/services/firestore/__init__.py b/google/cloud/firestore_v1/services/firestore/__init__.py index 14099c8671..fd8da8671e 100644 --- a/google/cloud/firestore_v1/services/firestore/__init__.py +++ b/google/cloud/firestore_v1/services/firestore/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import FirestoreClient from .async_client import FirestoreAsyncClient diff --git a/google/cloud/firestore_v1/services/firestore/async_client.py b/google/cloud/firestore_v1/services/firestore/async_client.py index 777f3784df..10743455c2 100644 --- a/google/cloud/firestore_v1/services/firestore/async_client.py +++ b/google/cloud/firestore_v1/services/firestore/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -31,10 +29,10 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.firestore_v1.services.firestore import pagers @@ -44,9 +42,8 @@ from google.cloud.firestore_v1.types import firestore from google.cloud.firestore_v1.types import query from google.cloud.firestore_v1.types import write as gf_write -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.rpc import status_pb2 as status # type: ignore - +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from .transports.base import FirestoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport from .client import FirestoreClient @@ -74,18 +71,14 @@ class FirestoreAsyncClient: parse_common_billing_account_path = staticmethod( FirestoreClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(FirestoreClient.common_folder_path) parse_common_folder_path = staticmethod(FirestoreClient.parse_common_folder_path) - common_organization_path = staticmethod(FirestoreClient.common_organization_path) parse_common_organization_path = staticmethod( FirestoreClient.parse_common_organization_path ) - common_project_path = staticmethod(FirestoreClient.common_project_path) 
parse_common_project_path = staticmethod(FirestoreClient.parse_common_project_path) - common_location_path = staticmethod(FirestoreClient.common_location_path) parse_common_location_path = staticmethod( FirestoreClient.parse_common_location_path @@ -93,7 +86,8 @@ class FirestoreAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -108,7 +102,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -125,7 +119,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> FirestoreTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: FirestoreTransport: The transport used by the client instance. @@ -139,12 +133,12 @@ def transport(self) -> FirestoreTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, FirestoreTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the firestore client. + """Instantiates the firestore client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -176,7 +170,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = FirestoreClient( credentials=credentials, transport=transport, @@ -198,7 +191,6 @@ async def get_document( request (:class:`google.cloud.firestore_v1.types.GetDocumentRequest`): The request object. The request for [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -212,7 +204,6 @@ async def get_document( """ # Create or coerce a protobuf request object. - request = firestore.GetDocumentRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -224,9 +215,10 @@ async def get_document( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -260,7 +252,6 @@ async def list_documents( request (:class:`google.cloud.firestore_v1.types.ListDocumentsRequest`): The request object. The request for [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -277,7 +268,6 @@ async def list_documents( """ # Create or coerce a protobuf request object. 
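Apart from the import aliases (core_exceptions, ga_credentials, status_pb2) and the rewrapped docstrings, the public call surface of FirestoreAsyncClient is unchanged. A sketch of a get_document call, assuming Application Default Credentials and a placeholder document path:

    # Sketch only: calling the async GAPIC surface directly.
    import asyncio

    from google.cloud.firestore_v1.services.firestore import FirestoreAsyncClient
    from google.cloud.firestore_v1.types import firestore

    async def fetch(name: str):
        client = FirestoreAsyncClient()  # uses Application Default Credentials
        request = firestore.GetDocumentRequest(name=name)
        return await client.get_document(request=request)

    doc = asyncio.run(
        fetch("projects/my-project/databases/(default)/documents/users/alice")
    )
    print(doc.name)
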
- request = firestore.ListDocumentsRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -289,9 +279,10 @@ async def list_documents( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -355,7 +346,6 @@ async def update_document( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -382,7 +372,6 @@ async def update_document( # If we have keyword arguments corresponding to fields on the # request, apply these. - if document is not None: request.document = document if update_mask is not None: @@ -396,7 +385,10 @@ async def update_document( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -440,7 +432,6 @@ async def delete_document( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -461,7 +452,6 @@ async def delete_document( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -474,9 +464,10 @@ async def delete_document( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -511,7 +502,6 @@ def batch_get_documents( request (:class:`google.cloud.firestore_v1.types.BatchGetDocumentsRequest`): The request object. The request for [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -525,7 +515,6 @@ def batch_get_documents( """ # Create or coerce a protobuf request object. - request = firestore.BatchGetDocumentsRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -537,9 +526,10 @@ def batch_get_documents( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -581,7 +571,6 @@ async def begin_transaction( This corresponds to the ``database`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
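The default retry policies in these hunks now treat core_exceptions.ResourceExhausted as retryable alongside DeadlineExceeded, InternalServerError and ServiceUnavailable. Callers can still pass an explicit policy per call; a sketch of one equivalent to the new default used for delete_document:

    # Sketch only: a per-call retry matching the new default predicate.
    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    custom_retry = retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.InternalServerError,
            core_exceptions.ResourceExhausted,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=60.0,
    )
    # e.g. await client.delete_document(request=request, retry=custom_retry)
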
@@ -608,7 +597,6 @@ async def begin_transaction( # If we have keyword arguments corresponding to fields on the # request, apply these. - if database is not None: request.database = database @@ -621,9 +609,10 @@ async def begin_transaction( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -674,7 +663,6 @@ async def commit( This corresponds to the ``writes`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -701,10 +689,8 @@ async def commit( # If we have keyword arguments corresponding to fields on the # request, apply these. - if database is not None: request.database = database - if writes: request.writes.extend(writes) @@ -716,7 +702,10 @@ async def commit( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -765,7 +754,6 @@ async def rollback( This corresponds to the ``transaction`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -786,7 +774,6 @@ async def rollback( # If we have keyword arguments corresponding to fields on the # request, apply these. - if database is not None: request.database = database if transaction is not None: @@ -801,9 +788,10 @@ async def rollback( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -836,7 +824,6 @@ def run_query( request (:class:`google.cloud.firestore_v1.types.RunQueryRequest`): The request object. The request for [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -850,7 +837,6 @@ def run_query( """ # Create or coerce a protobuf request object. - request = firestore.RunQueryRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -862,9 +848,10 @@ def run_query( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -902,7 +889,6 @@ async def partition_query( request (:class:`google.cloud.firestore_v1.types.PartitionQueryRequest`): The request object. The request for [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -919,7 +905,6 @@ async def partition_query( """ # Create or coerce a protobuf request object. - request = firestore.PartitionQueryRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -931,9 +916,10 @@ async def partition_query( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -1051,9 +1037,10 @@ def listen( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=86400.0, ), @@ -1095,7 +1082,6 @@ async def list_collection_ids( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1125,7 +1111,6 @@ async def list_collection_ids( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1138,9 +1123,10 @@ async def list_collection_ids( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1190,7 +1176,6 @@ async def batch_write( request (:class:`google.cloud.firestore_v1.types.BatchWriteRequest`): The request object. The request for [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1204,7 +1189,6 @@ async def batch_write( """ # Create or coerce a protobuf request object. - request = firestore.BatchWriteRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1216,7 +1200,9 @@ async def batch_write( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, exceptions.ServiceUnavailable, + core_exceptions.Aborted, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1250,7 +1236,6 @@ async def create_document( request (:class:`google.cloud.firestore_v1.types.CreateDocumentRequest`): The request object. The request for [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1264,7 +1249,6 @@ async def create_document( """ # Create or coerce a protobuf request object. 
- request = firestore.CreateDocumentRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1275,7 +1259,10 @@ async def create_document( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, diff --git a/google/cloud/firestore_v1/services/firestore/client.py b/google/cloud/firestore_v1/services/firestore/client.py index bd451dc257..126723d505 100644 --- a/google/cloud/firestore_v1/services/firestore/client.py +++ b/google/cloud/firestore_v1/services/firestore/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -33,10 +31,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -49,9 +47,8 @@ from google.cloud.firestore_v1.types import firestore from google.cloud.firestore_v1.types import query from google.cloud.firestore_v1.types import write as gf_write -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.rpc import status_pb2 as status # type: ignore - +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from .transports.base import FirestoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc import FirestoreGrpcTransport from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport @@ -70,7 +67,7 @@ class FirestoreClientMeta(type): _transport_registry["grpc_asyncio"] = FirestoreGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[FirestoreTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -101,7 +98,8 @@ class FirestoreClient(metaclass=FirestoreClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -135,7 +133,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. 
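The from_service_account_info classmethod above (and from_service_account_file just below) only have their docstrings rewrapped; behaviour is unchanged. A sketch, with a placeholder key-file path:

    # Sketch only: the two service-account constructors on the sync client.
    import json

    from google.cloud.firestore_v1.services.firestore import FirestoreClient

    client = FirestoreClient.from_service_account_file("service-account.json")
    # Or pass the already-parsed key material:
    with open("service-account.json") as fp:
        client = FirestoreClient.from_service_account_info(json.load(fp))
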
@@ -152,7 +151,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -171,16 +170,17 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> FirestoreTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - FirestoreTransport: The transport used by the client instance. + FirestoreTransport: The transport used by the client + instance. """ return self._transport @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -193,7 +193,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -204,7 +204,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -215,7 +215,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -226,7 +226,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -240,12 +240,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, FirestoreTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the firestore client. + """Instantiates the firestore client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -300,9 +300,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: @@ -314,12 +315,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -334,8 +337,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -364,7 +367,6 @@ def get_document( request (google.cloud.firestore_v1.types.GetDocumentRequest): The request object. The request for [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -378,7 +380,6 @@ def get_document( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a firestore.GetDocumentRequest. # There's no risk of modifying the input as we've already verified @@ -416,7 +417,6 @@ def list_documents( request (google.cloud.firestore_v1.types.ListDocumentsRequest): The request object. The request for [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -433,7 +433,6 @@ def list_documents( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a firestore.ListDocumentsRequest. # There's no risk of modifying the input as we've already verified @@ -501,7 +500,6 @@ def update_document( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -530,10 +528,8 @@ def update_document( # there are no flattened fields. if not isinstance(request, firestore.UpdateDocumentRequest): request = firestore.UpdateDocumentRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if document is not None: request.document = document if update_mask is not None: @@ -580,7 +576,6 @@ def delete_document( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -603,10 +598,8 @@ def delete_document( # there are no flattened fields. if not isinstance(request, firestore.DeleteDocumentRequest): request = firestore.DeleteDocumentRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -641,7 +634,6 @@ def batch_get_documents( request (google.cloud.firestore_v1.types.BatchGetDocumentsRequest): The request object. 
The request for [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -655,7 +647,6 @@ def batch_get_documents( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a firestore.BatchGetDocumentsRequest. # There's no risk of modifying the input as we've already verified @@ -701,7 +692,6 @@ def begin_transaction( This corresponds to the ``database`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -730,10 +720,8 @@ def begin_transaction( # there are no flattened fields. if not isinstance(request, firestore.BeginTransactionRequest): request = firestore.BeginTransactionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if database is not None: request.database = database @@ -784,7 +772,6 @@ def commit( This corresponds to the ``writes`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -813,10 +800,8 @@ def commit( # there are no flattened fields. if not isinstance(request, firestore.CommitRequest): request = firestore.CommitRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if database is not None: request.database = database if writes is not None: @@ -868,7 +853,6 @@ def rollback( This corresponds to the ``transaction`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -891,10 +875,8 @@ def rollback( # there are no flattened fields. if not isinstance(request, firestore.RollbackRequest): request = firestore.RollbackRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if database is not None: request.database = database if transaction is not None: @@ -929,7 +911,6 @@ def run_query( request (google.cloud.firestore_v1.types.RunQueryRequest): The request object. The request for [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -943,7 +924,6 @@ def run_query( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a firestore.RunQueryRequest. # There's no risk of modifying the input as we've already verified @@ -985,7 +965,6 @@ def partition_query( request (google.cloud.firestore_v1.types.PartitionQueryRequest): The request object. The request for [Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1002,7 +981,6 @@ def partition_query( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a firestore.PartitionQueryRequest. 
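All of the method docstrings above advertise the per-call retry and timeout parameters, and the functional change in this diff is that ResourceExhausted joins ServiceUnavailable in the default retryable set. A hedged sketch of overriding that policy on a single call (project and database IDs are placeholders, and application default credentials are assumed):

from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud.firestore_v1.services.firestore import FirestoreClient
from google.cloud.firestore_v1.types import firestore

client = FirestoreClient()

# Mirror the new default predicate: treat RESOURCE_EXHAUSTED and UNAVAILABLE
# as transient and retry them with exponential backoff.
custom_retry = retries.Retry(
    initial=0.1,
    maximum=60.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        core_exceptions.ResourceExhausted,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=60.0,
)

request = firestore.ListCollectionIdsRequest(
    parent="projects/my-project/databases/(default)/documents",
)
page_result = client.list_collection_ids(request=request, retry=custom_retry, timeout=60.0)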
# There's no risk of modifying the input as we've already verified @@ -1149,7 +1127,6 @@ def list_collection_ids( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1181,10 +1158,8 @@ def list_collection_ids( # there are no flattened fields. if not isinstance(request, firestore.ListCollectionIdsRequest): request = firestore.ListCollectionIdsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1234,7 +1209,6 @@ def batch_write( request (google.cloud.firestore_v1.types.BatchWriteRequest): The request object. The request for [Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1248,7 +1222,6 @@ def batch_write( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a firestore.BatchWriteRequest. # There's no risk of modifying the input as we've already verified @@ -1286,7 +1259,6 @@ def create_document( request (google.cloud.firestore_v1.types.CreateDocumentRequest): The request object. The request for [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1300,7 +1272,6 @@ def create_document( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a firestore.CreateDocumentRequest. # There's no risk of modifying the input as we've already verified diff --git a/google/cloud/firestore_v1/services/firestore/pagers.py b/google/cloud/firestore_v1/services/firestore/pagers.py index 8a74a14e45..0fae8a9d6e 100644 --- a/google/cloud/firestore_v1/services/firestore/pagers.py +++ b/google/cloud/firestore_v1/services/firestore/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -119,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -247,7 +245,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -375,7 +373,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/firestore_v1/services/firestore/transports/__init__.py b/google/cloud/firestore_v1/services/firestore/transports/__init__.py index 11ecff7619..05085abe84 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/__init__.py +++ b/google/cloud/firestore_v1/services/firestore/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/firestore_v1/services/firestore/transports/base.py b/google/cloud/firestore_v1/services/firestore/transports/base.py index 8ae14a6298..8ed56ff3dc 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/base.py +++ b/google/cloud/firestore_v1/services/firestore/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document from google.cloud.firestore_v1.types import firestore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -38,6 +37,17 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class FirestoreTransport(abc.ABC): """Abstract transport class for Firestore.""" @@ -47,21 +57,24 @@ class FirestoreTransport(abc.ABC): "https://www.googleapis.com/auth/datastore", ) + DEFAULT_HOST: str = "firestore.googleapis.com" + def __init__( self, *, - host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: 
Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -70,7 +83,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -84,29 +97,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
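The TODO above explains why two compatibility shims live in the base transport. Reduced to a standalone sketch, the idea is to probe the installed google-auth version once at import time and shape the keyword arguments passed to google.auth.default() accordingly (scope list abbreviated to the one visible in this file):

import packaging.version
import google.auth  # type: ignore

try:
    # google.auth.__version__ only exists from google-auth 1.26.0 onwards.
    _GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
    _GOOGLE_AUTH_VERSION = None

DEFAULT_SCOPES = ("https://www.googleapis.com/auth/datastore",)

def scopes_kwargs(scopes=None):
    # google-auth >= 1.25.0 understands default_scopes next to scopes; older
    # releases only accept scopes, so the defaults are substituted directly.
    if _GOOGLE_AUTH_VERSION and packaging.version.parse(
        _GOOGLE_AUTH_VERSION
    ) >= packaging.version.parse("1.25.0"):
        return {"scopes": scopes, "default_scopes": DEFAULT_SCOPES}
    return {"scopes": scopes or DEFAULT_SCOPES}

print(scopes_kwargs())  # e.g. {'scopes': None, 'default_scopes': (...)}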
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -117,9 +177,10 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -133,9 +194,10 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -148,7 +210,10 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -161,9 +226,10 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -177,9 +243,10 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -193,9 +260,10 @@ def _prep_wrapped_messages(self, client_info): 
maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -208,7 +276,10 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -221,9 +292,10 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -237,9 +309,10 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -253,9 +326,10 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -272,9 +346,10 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=86400.0, ), @@ -288,9 +363,10 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -304,7 +380,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, exceptions.ServiceUnavailable, + core_exceptions.Aborted, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -317,7 +395,10 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -328,20 +409,19 @@ def _prep_wrapped_messages(self, client_info): @property def get_document( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.GetDocumentRequest], - 
typing.Union[document.Document, typing.Awaitable[document.Document]], + Union[document.Document, Awaitable[document.Document]], ]: raise NotImplementedError() @property def list_documents( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.ListDocumentsRequest], - typing.Union[ - firestore.ListDocumentsResponse, - typing.Awaitable[firestore.ListDocumentsResponse], + Union[ + firestore.ListDocumentsResponse, Awaitable[firestore.ListDocumentsResponse] ], ]: raise NotImplementedError() @@ -349,29 +429,29 @@ def list_documents( @property def update_document( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.UpdateDocumentRequest], - typing.Union[gf_document.Document, typing.Awaitable[gf_document.Document]], + Union[gf_document.Document, Awaitable[gf_document.Document]], ]: raise NotImplementedError() @property def delete_document( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.DeleteDocumentRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def batch_get_documents( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.BatchGetDocumentsRequest], - typing.Union[ + Union[ firestore.BatchGetDocumentsResponse, - typing.Awaitable[firestore.BatchGetDocumentsResponse], + Awaitable[firestore.BatchGetDocumentsResponse], ], ]: raise NotImplementedError() @@ -379,11 +459,11 @@ def batch_get_documents( @property def begin_transaction( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.BeginTransactionRequest], - typing.Union[ + Union[ firestore.BeginTransactionResponse, - typing.Awaitable[firestore.BeginTransactionResponse], + Awaitable[firestore.BeginTransactionResponse], ], ]: raise NotImplementedError() @@ -391,42 +471,37 @@ def begin_transaction( @property def commit( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.CommitRequest], - typing.Union[ - firestore.CommitResponse, typing.Awaitable[firestore.CommitResponse] - ], + Union[firestore.CommitResponse, Awaitable[firestore.CommitResponse]], ]: raise NotImplementedError() @property def rollback( self, - ) -> typing.Callable[ - [firestore.RollbackRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ) -> Callable[ + [firestore.RollbackRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] ]: raise NotImplementedError() @property def run_query( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.RunQueryRequest], - typing.Union[ - firestore.RunQueryResponse, typing.Awaitable[firestore.RunQueryResponse] - ], + Union[firestore.RunQueryResponse, Awaitable[firestore.RunQueryResponse]], ]: raise NotImplementedError() @property def partition_query( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.PartitionQueryRequest], - typing.Union[ + Union[ firestore.PartitionQueryResponse, - typing.Awaitable[firestore.PartitionQueryResponse], + Awaitable[firestore.PartitionQueryResponse], ], ]: raise NotImplementedError() @@ -434,33 +509,29 @@ def partition_query( @property def write( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.WriteRequest], - typing.Union[ - firestore.WriteResponse, typing.Awaitable[firestore.WriteResponse] - ], + Union[firestore.WriteResponse, Awaitable[firestore.WriteResponse]], ]: raise NotImplementedError() @property def listen( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.ListenRequest], - typing.Union[ - firestore.ListenResponse, typing.Awaitable[firestore.ListenResponse] - ], + Union[firestore.ListenResponse, 
Awaitable[firestore.ListenResponse]], ]: raise NotImplementedError() @property def list_collection_ids( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.ListCollectionIdsRequest], - typing.Union[ + Union[ firestore.ListCollectionIdsResponse, - typing.Awaitable[firestore.ListCollectionIdsResponse], + Awaitable[firestore.ListCollectionIdsResponse], ], ]: raise NotImplementedError() @@ -468,20 +539,18 @@ def list_collection_ids( @property def batch_write( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.BatchWriteRequest], - typing.Union[ - firestore.BatchWriteResponse, typing.Awaitable[firestore.BatchWriteResponse] - ], + Union[firestore.BatchWriteResponse, Awaitable[firestore.BatchWriteResponse]], ]: raise NotImplementedError() @property def create_document( self, - ) -> typing.Callable[ + ) -> Callable[ [firestore.CreateDocumentRequest], - typing.Union[document.Document, typing.Awaitable[document.Document]], + Union[document.Document, Awaitable[document.Document]], ]: raise NotImplementedError() diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc.py b/google/cloud/firestore_v1/services/firestore/transports/grpc.py index 82aa10fba6..6a2cd14b3f 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/grpc.py +++ b/google/cloud/firestore_v1/services/firestore/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,14 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore @@ -29,8 +27,7 @@ from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document from google.cloud.firestore_v1.types import firestore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import FirestoreTransport, DEFAULT_CLIENT_INFO @@ -60,7 +57,7 @@ def __init__( self, *, host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -74,7 +71,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -184,7 +182,7 @@ def __init__( def create_channel( cls, host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -215,13 +213,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -312,7 +312,7 @@ def update_document( @property def delete_document( self, - ) -> Callable[[firestore.DeleteDocumentRequest], empty.Empty]: + ) -> Callable[[firestore.DeleteDocumentRequest], empty_pb2.Empty]: r"""Return a callable for the delete document method over gRPC. Deletes a document. @@ -331,7 +331,7 @@ def delete_document( self._stubs["delete_document"] = self.grpc_channel.unary_unary( "/google.firestore.v1.Firestore/DeleteDocument", request_serializer=firestore.DeleteDocumentRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_document"] @@ -419,7 +419,7 @@ def commit(self) -> Callable[[firestore.CommitRequest], firestore.CommitResponse return self._stubs["commit"] @property - def rollback(self) -> Callable[[firestore.RollbackRequest], empty.Empty]: + def rollback(self) -> Callable[[firestore.RollbackRequest], empty_pb2.Empty]: r"""Return a callable for the rollback method over gRPC. Rolls back a transaction. @@ -438,7 +438,7 @@ def rollback(self) -> Callable[[firestore.RollbackRequest], empty.Empty]: self._stubs["rollback"] = self.grpc_channel.unary_unary( "/google.firestore.v1.Firestore/Rollback", request_serializer=firestore.RollbackRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["rollback"] diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py index 40165168eb..1705e72fc9 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py +++ b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,15 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
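With scope handling moved into _get_self_signed_jwt_kwargs, create_channel now forwards default_scopes/default_host on newer google-api-core so grpc_helpers.create_channel can mint self-signed JWTs, and falls back to plain scopes otherwise. A sketch of wiring a channel built this way into the transport (application default credentials assumed):

from google.cloud.firestore_v1.services.firestore.transports.grpc import (
    FirestoreGrpcTransport,
)

# scopes=None lets the helper fall back to the transport's default scopes.
channel = FirestoreGrpcTransport.create_channel(
    "firestore.googleapis.com",
    scopes=None,
)

# When an explicit channel is supplied, the transport skips its own
# credential/channel setup and reuses the channel directly.
transport = FirestoreGrpcTransport(channel=channel)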
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -30,8 +28,7 @@ from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document from google.cloud.firestore_v1.types import firestore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import FirestoreTransport, DEFAULT_CLIENT_INFO from .grpc import FirestoreGrpcTransport @@ -63,7 +60,7 @@ class FirestoreGrpcAsyncIOTransport(FirestoreTransport): def create_channel( cls, host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -90,13 +87,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -104,7 +103,7 @@ def __init__( self, *, host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -118,7 +117,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -176,7 +176,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -318,7 +317,7 @@ def update_document( @property def delete_document( self, - ) -> Callable[[firestore.DeleteDocumentRequest], Awaitable[empty.Empty]]: + ) -> Callable[[firestore.DeleteDocumentRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete document method over gRPC. Deletes a document. 
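The asyncio transport mirrors the sync one; methods whose RPC returns google.protobuf.Empty (DeleteDocument, Rollback) now register empty_pb2.Empty.FromString as their deserializer, but nothing changes at the call site. A hedged sketch using the async client (class name assumed from the generated surface; document path and credentials are placeholders):

import asyncio

from google.cloud.firestore_v1.services.firestore import FirestoreAsyncClient
from google.cloud.firestore_v1.types import firestore

async def main():
    client = FirestoreAsyncClient()
    # DeleteDocument maps to an Empty response, so the awaited call simply
    # returns once the RPC succeeds.
    await client.delete_document(
        request=firestore.DeleteDocumentRequest(
            name="projects/my-project/databases/(default)/documents/users/alice",
        )
    )

asyncio.run(main())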
@@ -337,7 +336,7 @@ def delete_document( self._stubs["delete_document"] = self.grpc_channel.unary_unary( "/google.firestore.v1.Firestore/DeleteDocument", request_serializer=firestore.DeleteDocumentRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_document"] @@ -429,7 +428,9 @@ def commit( return self._stubs["commit"] @property - def rollback(self) -> Callable[[firestore.RollbackRequest], Awaitable[empty.Empty]]: + def rollback( + self, + ) -> Callable[[firestore.RollbackRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the rollback method over gRPC. Rolls back a transaction. @@ -448,7 +449,7 @@ def rollback(self) -> Callable[[firestore.RollbackRequest], Awaitable[empty.Empt self._stubs["rollback"] = self.grpc_channel.unary_unary( "/google.firestore.v1.Firestore/Rollback", request_serializer=firestore.RollbackRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["rollback"] diff --git a/google/cloud/firestore_v1/types/__init__.py b/google/cloud/firestore_v1/types/__init__.py index a353384a95..3bcdca10a7 100644 --- a/google/cloud/firestore_v1/types/__init__.py +++ b/google/cloud/firestore_v1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .common import ( DocumentMask, Precondition, diff --git a/google/cloud/firestore_v1/types/common.py b/google/cloud/firestore_v1/types/common.py index 2fc5171d6c..939840a52a 100644 --- a/google/cloud/firestore_v1/types/common.py +++ b/google/cloud/firestore_v1/types/common.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -41,7 +38,7 @@ class DocumentMask(proto.Message): field path syntax reference. """ - field_paths = proto.RepeatedField(proto.STRING, number=1) + field_paths = proto.RepeatedField(proto.STRING, number=1,) class Precondition(proto.Message): @@ -57,16 +54,17 @@ class Precondition(proto.Message): have been last updated at that time. """ - exists = proto.Field(proto.BOOL, number=1, oneof="condition_type") - + exists = proto.Field(proto.BOOL, number=1, oneof="condition_type",) update_time = proto.Field( - proto.MESSAGE, number=2, oneof="condition_type", message=timestamp.Timestamp, + proto.MESSAGE, + number=2, + oneof="condition_type", + message=timestamp_pb2.Timestamp, ) class TransactionOptions(proto.Message): r"""Options for creating a new transaction. - Attributes: read_only (google.cloud.firestore_v1.types.TransactionOptions.ReadOnly): The transaction can only be used for read @@ -85,7 +83,7 @@ class ReadWrite(proto.Message): An optional transaction to retry. 
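The trailing-comma churn in the types below is cosmetic output of the regenerated proto-plus definitions; construction stays keyword-based. A small sketch with the common.py messages shown here (field paths and the timestamp are illustrative):

from google.cloud.firestore_v1.types import common
from google.protobuf import timestamp_pb2

mask = common.DocumentMask(field_paths=["title", "address.city"])

# exists and update_time are members of the condition_type oneof, so a
# Precondition carries at most one of them.
must_exist = common.Precondition(exists=True)
unchanged_since = common.Precondition(
    update_time=timestamp_pb2.Timestamp(seconds=1_600_000_000)
)

options = common.TransactionOptions(
    read_write=common.TransactionOptions.ReadWrite()
)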
""" - retry_transaction = proto.Field(proto.BYTES, number=1) + retry_transaction = proto.Field(proto.BYTES, number=1,) class ReadOnly(proto.Message): r"""Options for a transaction that can only be used to read @@ -101,11 +99,10 @@ class ReadOnly(proto.Message): proto.MESSAGE, number=2, oneof="consistency_selector", - message=timestamp.Timestamp, + message=timestamp_pb2.Timestamp, ) read_only = proto.Field(proto.MESSAGE, number=2, oneof="mode", message=ReadOnly,) - read_write = proto.Field(proto.MESSAGE, number=3, oneof="mode", message=ReadWrite,) diff --git a/google/cloud/firestore_v1/types/document.py b/google/cloud/firestore_v1/types/document.py index 26ecf45cf5..68631cb725 100644 --- a/google/cloud/firestore_v1/types/document.py +++ b/google/cloud/firestore_v1/types/document.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.type import latlng_pb2 as latlng # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import latlng_pb2 # type: ignore __protobuf__ = proto.module( @@ -80,18 +77,14 @@ class Document(proto.Message): ``read_time`` of a query. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) fields = proto.MapField(proto.STRING, proto.MESSAGE, number=2, message="Value",) - - create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) class Value(proto.Message): r"""A message that can hold any of the supported value types. - Attributes: null_value (google.protobuf.struct_pb2.NullValue): A null value. 
@@ -132,33 +125,23 @@ class Value(proto.Message): """ null_value = proto.Field( - proto.ENUM, number=11, oneof="value_type", enum=struct.NullValue, + proto.ENUM, number=11, oneof="value_type", enum=struct_pb2.NullValue, ) - - boolean_value = proto.Field(proto.BOOL, number=1, oneof="value_type") - - integer_value = proto.Field(proto.INT64, number=2, oneof="value_type") - - double_value = proto.Field(proto.DOUBLE, number=3, oneof="value_type") - + boolean_value = proto.Field(proto.BOOL, number=1, oneof="value_type",) + integer_value = proto.Field(proto.INT64, number=2, oneof="value_type",) + double_value = proto.Field(proto.DOUBLE, number=3, oneof="value_type",) timestamp_value = proto.Field( - proto.MESSAGE, number=10, oneof="value_type", message=timestamp.Timestamp, + proto.MESSAGE, number=10, oneof="value_type", message=timestamp_pb2.Timestamp, ) - - string_value = proto.Field(proto.STRING, number=17, oneof="value_type") - - bytes_value = proto.Field(proto.BYTES, number=18, oneof="value_type") - - reference_value = proto.Field(proto.STRING, number=5, oneof="value_type") - + string_value = proto.Field(proto.STRING, number=17, oneof="value_type",) + bytes_value = proto.Field(proto.BYTES, number=18, oneof="value_type",) + reference_value = proto.Field(proto.STRING, number=5, oneof="value_type",) geo_point_value = proto.Field( - proto.MESSAGE, number=8, oneof="value_type", message=latlng.LatLng, + proto.MESSAGE, number=8, oneof="value_type", message=latlng_pb2.LatLng, ) - array_value = proto.Field( proto.MESSAGE, number=9, oneof="value_type", message="ArrayValue", ) - map_value = proto.Field( proto.MESSAGE, number=6, oneof="value_type", message="MapValue", ) @@ -166,7 +149,6 @@ class Value(proto.Message): class ArrayValue(proto.Message): r"""An array value. - Attributes: values (Sequence[google.cloud.firestore_v1.types.Value]): Values in the array. @@ -177,7 +159,6 @@ class ArrayValue(proto.Message): class MapValue(proto.Message): r"""A map value. - Attributes: fields (Sequence[google.cloud.firestore_v1.types.MapValue.FieldsEntry]): The map's fields. diff --git a/google/cloud/firestore_v1/types/firestore.py b/google/cloud/firestore_v1/types/firestore.py index 78cfd5d7aa..405ee02703 100644 --- a/google/cloud/firestore_v1/types/firestore.py +++ b/google/cloud/firestore_v1/types/firestore.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,16 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.firestore_v1.types import common from google.cloud.firestore_v1.types import document as gf_document from google.cloud.firestore_v1.types import query as gf_query from google.cloud.firestore_v1.types import write -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.rpc import status_pb2 as gr_status # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore __protobuf__ = proto.module( @@ -83,17 +80,14 @@ class GetDocumentRequest(proto.Message): seconds. 
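Document values carry their type tag explicitly via the value_type oneof shown above. A sketch of building a Document by hand with the proto-plus types (the resource name is a placeholder; real code usually goes through the handwritten client instead):

from google.cloud.firestore_v1.types import document

doc = document.Document(
    name="projects/my-project/databases/(default)/documents/cities/tokyo",
    fields={
        "population": document.Value(integer_value=37_400_000),
        "capital": document.Value(boolean_value=True),
        "tags": document.Value(
            array_value=document.ArrayValue(
                values=[document.Value(string_value="asia")]
            )
        ),
    },
)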
""" - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) mask = proto.Field(proto.MESSAGE, number=2, message=common.DocumentMask,) - - transaction = proto.Field(proto.BYTES, number=3, oneof="consistency_selector") - + transaction = proto.Field(proto.BYTES, number=3, oneof="consistency_selector",) read_time = proto.Field( proto.MESSAGE, number=5, oneof="consistency_selector", - message=timestamp.Timestamp, + message=timestamp_pb2.Timestamp, ) @@ -146,28 +140,20 @@ class ListDocumentsRequest(proto.Message): ``order_by``. """ - parent = proto.Field(proto.STRING, number=1) - - collection_id = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) - - page_token = proto.Field(proto.STRING, number=4) - - order_by = proto.Field(proto.STRING, number=6) - + parent = proto.Field(proto.STRING, number=1,) + collection_id = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) + page_token = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=6,) mask = proto.Field(proto.MESSAGE, number=7, message=common.DocumentMask,) - - transaction = proto.Field(proto.BYTES, number=8, oneof="consistency_selector") - + transaction = proto.Field(proto.BYTES, number=8, oneof="consistency_selector",) read_time = proto.Field( proto.MESSAGE, number=10, oneof="consistency_selector", - message=timestamp.Timestamp, + message=timestamp_pb2.Timestamp, ) - - show_missing = proto.Field(proto.BOOL, number=12) + show_missing = proto.Field(proto.BOOL, number=12,) class ListDocumentsResponse(proto.Message): @@ -188,8 +174,7 @@ def raw_page(self): documents = proto.RepeatedField( proto.MESSAGE, number=1, message=gf_document.Document, ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CreateDocumentRequest(proto.Message): @@ -220,14 +205,10 @@ class CreateDocumentRequest(proto.Message): the response. """ - parent = proto.Field(proto.STRING, number=1) - - collection_id = proto.Field(proto.STRING, number=2) - - document_id = proto.Field(proto.STRING, number=3) - + parent = proto.Field(proto.STRING, number=1,) + collection_id = proto.Field(proto.STRING, number=2,) + document_id = proto.Field(proto.STRING, number=3,) document = proto.Field(proto.MESSAGE, number=4, message=gf_document.Document,) - mask = proto.Field(proto.MESSAGE, number=5, message=common.DocumentMask,) @@ -263,11 +244,8 @@ class UpdateDocumentRequest(proto.Message): """ document = proto.Field(proto.MESSAGE, number=1, message=gf_document.Document,) - update_mask = proto.Field(proto.MESSAGE, number=2, message=common.DocumentMask,) - mask = proto.Field(proto.MESSAGE, number=3, message=common.DocumentMask,) - current_document = proto.Field( proto.MESSAGE, number=4, message=common.Precondition, ) @@ -288,8 +266,7 @@ class DeleteDocumentRequest(proto.Message): by the target document. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) current_document = proto.Field( proto.MESSAGE, number=2, message=common.Precondition, ) @@ -327,26 +304,21 @@ class BatchGetDocumentsRequest(proto.Message): time. This may not be older than 270 seconds. 
""" - database = proto.Field(proto.STRING, number=1) - - documents = proto.RepeatedField(proto.STRING, number=2) - + database = proto.Field(proto.STRING, number=1,) + documents = proto.RepeatedField(proto.STRING, number=2,) mask = proto.Field(proto.MESSAGE, number=3, message=common.DocumentMask,) - - transaction = proto.Field(proto.BYTES, number=4, oneof="consistency_selector") - + transaction = proto.Field(proto.BYTES, number=4, oneof="consistency_selector",) new_transaction = proto.Field( proto.MESSAGE, number=5, oneof="consistency_selector", message=common.TransactionOptions, ) - read_time = proto.Field( proto.MESSAGE, number=7, oneof="consistency_selector", - message=timestamp.Timestamp, + message=timestamp_pb2.Timestamp, ) @@ -376,12 +348,9 @@ class BatchGetDocumentsResponse(proto.Message): found = proto.Field( proto.MESSAGE, number=1, oneof="result", message=gf_document.Document, ) - - missing = proto.Field(proto.STRING, number=2, oneof="result") - - transaction = proto.Field(proto.BYTES, number=3) - - read_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + missing = proto.Field(proto.STRING, number=2, oneof="result",) + transaction = proto.Field(proto.BYTES, number=3,) + read_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) class BeginTransactionRequest(proto.Message): @@ -397,8 +366,7 @@ class BeginTransactionRequest(proto.Message): Defaults to a read-write transaction. """ - database = proto.Field(proto.STRING, number=1) - + database = proto.Field(proto.STRING, number=1,) options = proto.Field(proto.MESSAGE, number=2, message=common.TransactionOptions,) @@ -411,7 +379,7 @@ class BeginTransactionResponse(proto.Message): The transaction that was started. """ - transaction = proto.Field(proto.BYTES, number=1) + transaction = proto.Field(proto.BYTES, number=1,) class CommitRequest(proto.Message): @@ -430,11 +398,9 @@ class CommitRequest(proto.Message): transaction, and commits it. """ - database = proto.Field(proto.STRING, number=1) - + database = proto.Field(proto.STRING, number=1,) writes = proto.RepeatedField(proto.MESSAGE, number=2, message=write.Write,) - - transaction = proto.Field(proto.BYTES, number=3) + transaction = proto.Field(proto.BYTES, number=3,) class CommitResponse(proto.Message): @@ -455,8 +421,7 @@ class CommitResponse(proto.Message): write_results = proto.RepeatedField( proto.MESSAGE, number=1, message=write.WriteResult, ) - - commit_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + commit_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) class RollbackRequest(proto.Message): @@ -471,9 +436,8 @@ class RollbackRequest(proto.Message): Required. The transaction to roll back. """ - database = proto.Field(proto.STRING, number=1) - - transaction = proto.Field(proto.BYTES, number=2) + database = proto.Field(proto.STRING, number=1,) + transaction = proto.Field(proto.BYTES, number=2,) class RunQueryRequest(proto.Message): @@ -503,26 +467,22 @@ class RunQueryRequest(proto.Message): time. This may not be older than 270 seconds. 
""" - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) structured_query = proto.Field( proto.MESSAGE, number=2, oneof="query_type", message=gf_query.StructuredQuery, ) - - transaction = proto.Field(proto.BYTES, number=5, oneof="consistency_selector") - + transaction = proto.Field(proto.BYTES, number=5, oneof="consistency_selector",) new_transaction = proto.Field( proto.MESSAGE, number=6, oneof="consistency_selector", message=common.TransactionOptions, ) - read_time = proto.Field( proto.MESSAGE, number=7, oneof="consistency_selector", - message=timestamp.Timestamp, + message=timestamp_pb2.Timestamp, ) @@ -555,13 +515,10 @@ class RunQueryResponse(proto.Message): the current response. """ - transaction = proto.Field(proto.BYTES, number=2) - + transaction = proto.Field(proto.BYTES, number=2,) document = proto.Field(proto.MESSAGE, number=1, message=gf_document.Document,) - - read_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - - skipped_results = proto.Field(proto.INT32, number=4) + read_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + skipped_results = proto.Field(proto.INT32, number=4,) class PartitionQueryRequest(proto.Message): @@ -621,17 +578,13 @@ class PartitionQueryRequest(proto.Message): ``partition_count``. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) structured_query = proto.Field( proto.MESSAGE, number=2, oneof="query_type", message=gf_query.StructuredQuery, ) - - partition_count = proto.Field(proto.INT64, number=3) - - page_token = proto.Field(proto.STRING, number=4) - - page_size = proto.Field(proto.INT32, number=5) + partition_count = proto.Field(proto.INT64, number=3,) + page_token = proto.Field(proto.STRING, number=4,) + page_size = proto.Field(proto.INT32, number=5,) class PartitionQueryResponse(proto.Message): @@ -669,8 +622,7 @@ def raw_page(self): return self partitions = proto.RepeatedField(proto.MESSAGE, number=1, message=gf_query.Cursor,) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class WriteRequest(proto.Message): @@ -723,15 +675,11 @@ class WriteRequest(proto.Message): Labels associated with this write request. """ - database = proto.Field(proto.STRING, number=1) - - stream_id = proto.Field(proto.STRING, number=2) - + database = proto.Field(proto.STRING, number=1,) + stream_id = proto.Field(proto.STRING, number=2,) writes = proto.RepeatedField(proto.MESSAGE, number=3, message=write.Write,) - - stream_token = proto.Field(proto.BYTES, number=4) - - labels = proto.MapField(proto.STRING, proto.STRING, number=5) + stream_token = proto.Field(proto.BYTES, number=4,) + labels = proto.MapField(proto.STRING, proto.STRING, number=5,) class WriteResponse(proto.Message): @@ -758,15 +706,12 @@ class WriteResponse(proto.Message): effects of the write. """ - stream_id = proto.Field(proto.STRING, number=1) - - stream_token = proto.Field(proto.BYTES, number=2) - + stream_id = proto.Field(proto.STRING, number=1,) + stream_token = proto.Field(proto.BYTES, number=2,) write_results = proto.RepeatedField( proto.MESSAGE, number=3, message=write.WriteResult, ) - - commit_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + commit_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) class ListenRequest(proto.Message): @@ -786,15 +731,12 @@ class ListenRequest(proto.Message): Labels associated with this target change. 
""" - database = proto.Field(proto.STRING, number=1) - + database = proto.Field(proto.STRING, number=1,) add_target = proto.Field( proto.MESSAGE, number=2, oneof="target_change", message="Target", ) - - remove_target = proto.Field(proto.INT32, number=3, oneof="target_change") - - labels = proto.MapField(proto.STRING, proto.STRING, number=4) + remove_target = proto.Field(proto.INT32, number=3, oneof="target_change",) + labels = proto.MapField(proto.STRING, proto.STRING, number=4,) class ListenResponse(proto.Message): @@ -824,19 +766,15 @@ class ListenResponse(proto.Message): target_change = proto.Field( proto.MESSAGE, number=2, oneof="response_type", message="TargetChange", ) - document_change = proto.Field( proto.MESSAGE, number=3, oneof="response_type", message=write.DocumentChange, ) - document_delete = proto.Field( proto.MESSAGE, number=4, oneof="response_type", message=write.DocumentDelete, ) - document_remove = proto.Field( proto.MESSAGE, number=6, oneof="response_type", message=write.DocumentRemove, ) - filter = proto.Field( proto.MESSAGE, number=5, oneof="response_type", message=write.ExistenceFilter, ) @@ -844,7 +782,6 @@ class ListenResponse(proto.Message): class Target(proto.Message): r"""A specification of a set of documents to listen to. - Attributes: query (google.cloud.firestore_v1.types.Target.QueryTarget): A target specified by a query. @@ -874,7 +811,6 @@ class Target(proto.Message): class DocumentsTarget(proto.Message): r"""A target specified by a set of documents names. - Attributes: documents (Sequence[str]): The names of the documents to retrieve. In the format: @@ -884,11 +820,10 @@ class DocumentsTarget(proto.Message): elided. """ - documents = proto.RepeatedField(proto.STRING, number=2) + documents = proto.RepeatedField(proto.STRING, number=2,) class QueryTarget(proto.Message): r"""A target specified by a query. - Attributes: parent (str): The parent resource name. In the format: @@ -902,8 +837,7 @@ class QueryTarget(proto.Message): A structured query. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) structured_query = proto.Field( proto.MESSAGE, number=2, @@ -914,25 +848,19 @@ class QueryTarget(proto.Message): query = proto.Field( proto.MESSAGE, number=2, oneof="target_type", message=QueryTarget, ) - documents = proto.Field( proto.MESSAGE, number=3, oneof="target_type", message=DocumentsTarget, ) - - resume_token = proto.Field(proto.BYTES, number=4, oneof="resume_type") - + resume_token = proto.Field(proto.BYTES, number=4, oneof="resume_type",) read_time = proto.Field( - proto.MESSAGE, number=11, oneof="resume_type", message=timestamp.Timestamp, + proto.MESSAGE, number=11, oneof="resume_type", message=timestamp_pb2.Timestamp, ) - - target_id = proto.Field(proto.INT32, number=5) - - once = proto.Field(proto.BOOL, number=6) + target_id = proto.Field(proto.INT32, number=5,) + once = proto.Field(proto.BOOL, number=6,) class TargetChange(proto.Message): r"""Targets being watched have changed. - Attributes: target_change_type (google.cloud.firestore_v1.types.TargetChange.TargetChangeType): The type of change that occurred. 
@@ -973,14 +901,10 @@ class TargetChangeType(proto.Enum): RESET = 4 target_change_type = proto.Field(proto.ENUM, number=1, enum=TargetChangeType,) - - target_ids = proto.RepeatedField(proto.INT32, number=2) - - cause = proto.Field(proto.MESSAGE, number=3, message=gr_status.Status,) - - resume_token = proto.Field(proto.BYTES, number=4) - - read_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + target_ids = proto.RepeatedField(proto.INT32, number=2,) + cause = proto.Field(proto.MESSAGE, number=3, message=status_pb2.Status,) + resume_token = proto.Field(proto.BYTES, number=4,) + read_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) class ListCollectionIdsRequest(proto.Message): @@ -1000,11 +924,9 @@ class ListCollectionIdsRequest(proto.Message): [ListCollectionIdsResponse][google.firestore.v1.ListCollectionIdsResponse]. """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListCollectionIdsResponse(proto.Message): @@ -1023,9 +945,8 @@ class ListCollectionIdsResponse(proto.Message): def raw_page(self): return self - collection_ids = proto.RepeatedField(proto.STRING, number=1) - - next_page_token = proto.Field(proto.STRING, number=2) + collection_ids = proto.RepeatedField(proto.STRING, number=1,) + next_page_token = proto.Field(proto.STRING, number=2,) class BatchWriteRequest(proto.Message): @@ -1046,11 +967,9 @@ class BatchWriteRequest(proto.Message): Labels associated with this batch write. """ - database = proto.Field(proto.STRING, number=1) - + database = proto.Field(proto.STRING, number=1,) writes = proto.RepeatedField(proto.MESSAGE, number=2, message=write.Write,) - - labels = proto.MapField(proto.STRING, proto.STRING, number=3) + labels = proto.MapField(proto.STRING, proto.STRING, number=3,) class BatchWriteResponse(proto.Message): @@ -1071,8 +990,7 @@ class BatchWriteResponse(proto.Message): write_results = proto.RepeatedField( proto.MESSAGE, number=1, message=write.WriteResult, ) - - status = proto.RepeatedField(proto.MESSAGE, number=2, message=gr_status.Status,) + status = proto.RepeatedField(proto.MESSAGE, number=2, message=status_pb2.Status,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/query.py b/google/cloud/firestore_v1/types/query.py index 2105e0d24a..dea272dd51 100644 --- a/google/cloud/firestore_v1/types/query.py +++ b/google/cloud/firestore_v1/types/query.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.firestore_v1.types import document -from google.protobuf import wrappers_pb2 as wrappers # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore __protobuf__ = proto.module( @@ -29,7 +26,6 @@ class StructuredQuery(proto.Message): r"""A Firestore query. - Attributes: select (google.cloud.firestore_v1.types.StructuredQuery.Projection): The projection to return. @@ -81,7 +77,6 @@ class Direction(proto.Enum): class CollectionSelector(proto.Message): r"""A selection of a collection, such as ``messages as m1``. 
- Attributes: collection_id (str): The collection ID. @@ -93,13 +88,11 @@ class CollectionSelector(proto.Message): collections. """ - collection_id = proto.Field(proto.STRING, number=2) - - all_descendants = proto.Field(proto.BOOL, number=3) + collection_id = proto.Field(proto.STRING, number=2,) + all_descendants = proto.Field(proto.BOOL, number=3,) class Filter(proto.Message): r"""A filter. - Attributes: composite_filter (google.cloud.firestore_v1.types.StructuredQuery.CompositeFilter): A composite filter. @@ -115,14 +108,12 @@ class Filter(proto.Message): oneof="filter_type", message="StructuredQuery.CompositeFilter", ) - field_filter = proto.Field( proto.MESSAGE, number=2, oneof="filter_type", message="StructuredQuery.FieldFilter", ) - unary_filter = proto.Field( proto.MESSAGE, number=3, @@ -150,14 +141,12 @@ class Operator(proto.Enum): op = proto.Field( proto.ENUM, number=1, enum="StructuredQuery.CompositeFilter.Operator", ) - filters = proto.RepeatedField( proto.MESSAGE, number=2, message="StructuredQuery.Filter", ) class FieldFilter(proto.Message): r"""A filter on a specific field. - Attributes: field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): The field to filter by. @@ -184,16 +173,13 @@ class Operator(proto.Enum): field = proto.Field( proto.MESSAGE, number=1, message="StructuredQuery.FieldReference", ) - op = proto.Field( proto.ENUM, number=2, enum="StructuredQuery.FieldFilter.Operator", ) - value = proto.Field(proto.MESSAGE, number=3, message=document.Value,) class UnaryFilter(proto.Message): r"""A filter with a single operand. - Attributes: op (google.cloud.firestore_v1.types.StructuredQuery.UnaryFilter.Operator): The unary operator to apply. @@ -212,7 +198,6 @@ class Operator(proto.Enum): op = proto.Field( proto.ENUM, number=1, enum="StructuredQuery.UnaryFilter.Operator", ) - field = proto.Field( proto.MESSAGE, number=2, @@ -222,7 +207,6 @@ class Operator(proto.Enum): class Order(proto.Message): r"""An order on a field. - Attributes: field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference): The field to order by. @@ -233,22 +217,19 @@ class Order(proto.Message): field = proto.Field( proto.MESSAGE, number=1, message="StructuredQuery.FieldReference", ) - direction = proto.Field(proto.ENUM, number=2, enum="StructuredQuery.Direction",) class FieldReference(proto.Message): r"""A reference to a field, such as ``max(messages.time) as max_time``. - Attributes: field_path (str): """ - field_path = proto.Field(proto.STRING, number=2) + field_path = proto.Field(proto.STRING, number=2,) class Projection(proto.Message): r"""The projection of document's fields to return. - Attributes: fields (Sequence[google.cloud.firestore_v1.types.StructuredQuery.FieldReference]): The fields to return. 
@@ -262,25 +243,17 @@ class Projection(proto.Message): ) select = proto.Field(proto.MESSAGE, number=1, message=Projection,) - from_ = proto.RepeatedField(proto.MESSAGE, number=2, message=CollectionSelector,) - where = proto.Field(proto.MESSAGE, number=3, message=Filter,) - order_by = proto.RepeatedField(proto.MESSAGE, number=4, message=Order,) - start_at = proto.Field(proto.MESSAGE, number=7, message="Cursor",) - end_at = proto.Field(proto.MESSAGE, number=8, message="Cursor",) - - offset = proto.Field(proto.INT32, number=6) - - limit = proto.Field(proto.MESSAGE, number=5, message=wrappers.Int32Value,) + offset = proto.Field(proto.INT32, number=6,) + limit = proto.Field(proto.MESSAGE, number=5, message=wrappers_pb2.Int32Value,) class Cursor(proto.Message): r"""A position in a query result set. - Attributes: values (Sequence[google.cloud.firestore_v1.types.Value]): The values that represent a position, in the @@ -295,8 +268,7 @@ class Cursor(proto.Message): """ values = proto.RepeatedField(proto.MESSAGE, number=1, message=document.Value,) - - before = proto.Field(proto.BOOL, number=2) + before = proto.Field(proto.BOOL, number=2,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/write.py b/google/cloud/firestore_v1/types/write.py index 06c715292e..8e5b4d920d 100644 --- a/google/cloud/firestore_v1/types/write.py +++ b/google/cloud/firestore_v1/types/write.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.firestore_v1.types import common from google.cloud.firestore_v1.types import document as gf_document -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -39,7 +36,6 @@ class Write(proto.Message): r"""A write on a document. - Attributes: update (google.cloud.firestore_v1.types.Document): A document to write. @@ -75,19 +71,14 @@ class Write(proto.Message): update = proto.Field( proto.MESSAGE, number=1, oneof="operation", message=gf_document.Document, ) - - delete = proto.Field(proto.STRING, number=2, oneof="operation") - + delete = proto.Field(proto.STRING, number=2, oneof="operation",) transform = proto.Field( proto.MESSAGE, number=6, oneof="operation", message="DocumentTransform", ) - update_mask = proto.Field(proto.MESSAGE, number=3, message=common.DocumentMask,) - update_transforms = proto.RepeatedField( proto.MESSAGE, number=7, message="DocumentTransform.FieldTransform", ) - current_document = proto.Field( proto.MESSAGE, number=4, message=common.Precondition, ) @@ -95,7 +86,6 @@ class Write(proto.Message): class DocumentTransform(proto.Message): r"""A transformation of a document. - Attributes: document (str): The name of the document to transform. @@ -107,7 +97,6 @@ class DocumentTransform(proto.Message): class FieldTransform(proto.Message): r"""A transformation of a field of the document. - Attributes: field_path (str): The path of the field. 
See @@ -195,34 +184,28 @@ class ServerValue(proto.Enum): SERVER_VALUE_UNSPECIFIED = 0 REQUEST_TIME = 1 - field_path = proto.Field(proto.STRING, number=1) - + field_path = proto.Field(proto.STRING, number=1,) set_to_server_value = proto.Field( proto.ENUM, number=2, oneof="transform_type", enum="DocumentTransform.FieldTransform.ServerValue", ) - increment = proto.Field( proto.MESSAGE, number=3, oneof="transform_type", message=gf_document.Value, ) - maximum = proto.Field( proto.MESSAGE, number=4, oneof="transform_type", message=gf_document.Value, ) - minimum = proto.Field( proto.MESSAGE, number=5, oneof="transform_type", message=gf_document.Value, ) - append_missing_elements = proto.Field( proto.MESSAGE, number=6, oneof="transform_type", message=gf_document.ArrayValue, ) - remove_all_from_array = proto.Field( proto.MESSAGE, number=7, @@ -230,8 +213,7 @@ class ServerValue(proto.Enum): message=gf_document.ArrayValue, ) - document = proto.Field(proto.STRING, number=1) - + document = proto.Field(proto.STRING, number=1,) field_transforms = proto.RepeatedField( proto.MESSAGE, number=2, message=FieldTransform, ) @@ -239,7 +221,6 @@ class ServerValue(proto.Enum): class WriteResult(proto.Message): r"""The result of applying a write. - Attributes: update_time (google.protobuf.timestamp_pb2.Timestamp): The last update time of the document after applying the @@ -253,8 +234,7 @@ class WriteResult(proto.Message): in the same order. """ - update_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - + update_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) transform_results = proto.RepeatedField( proto.MESSAGE, number=2, message=gf_document.Value, ) @@ -287,10 +267,8 @@ class DocumentChange(proto.Message): """ document = proto.Field(proto.MESSAGE, number=1, message=gf_document.Document,) - - target_ids = proto.RepeatedField(proto.INT32, number=5) - - removed_target_ids = proto.RepeatedField(proto.INT32, number=6) + target_ids = proto.RepeatedField(proto.INT32, number=5,) + removed_target_ids = proto.RepeatedField(proto.INT32, number=6,) class DocumentDelete(proto.Message): @@ -317,11 +295,9 @@ class DocumentDelete(proto.Message): Greater or equal to the ``commit_time`` of the delete. """ - document = proto.Field(proto.STRING, number=1) - - removed_target_ids = proto.RepeatedField(proto.INT32, number=6) - - read_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + document = proto.Field(proto.STRING, number=1,) + removed_target_ids = proto.RepeatedField(proto.INT32, number=6,) + read_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) class DocumentRemove(proto.Message): @@ -351,16 +327,13 @@ class DocumentRemove(proto.Message): change/delete/remove. """ - document = proto.Field(proto.STRING, number=1) - - removed_target_ids = proto.RepeatedField(proto.INT32, number=2) - - read_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + document = proto.Field(proto.STRING, number=1,) + removed_target_ids = proto.RepeatedField(proto.INT32, number=2,) + read_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) class ExistenceFilter(proto.Message): r"""A digest of all the documents that match a given target. - Attributes: target_id (int): The target ID to which this filter applies. @@ -373,9 +346,8 @@ class ExistenceFilter(proto.Message): longer match the target. 
""" - target_id = proto.Field(proto.INT32, number=1) - - count = proto.Field(proto.INT32, number=2) + target_id = proto.Field(proto.INT32, number=1,) + count = proto.Field(proto.INT32, number=2,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/noxfile.py b/noxfile.py index 2f33a7170e..ff4bb10c4c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -30,7 +30,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.7"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -63,16 +63,9 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( "black", *BLACK_PATHS, @@ -142,9 +135,6 @@ def system(session): # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("FIRESTORE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") # Install pyopenssl for mTLS testing. if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": session.install("pyopenssl") @@ -174,7 +164,7 @@ def system(session): if system_test_exists: session.run( "py.test", - "--quiet", + "--verbose", f"--junitxml=system_{session.python}_sponge_log.xml", system_test_path, *session.posargs, @@ -182,7 +172,7 @@ def system(session): if system_test_folder_exists: session.run( "py.test", - "--quiet", + "--verbose", f"--junitxml=system_{session.python}_sponge_log.xml", system_test_folder_path, *session.posargs, @@ -207,7 +197,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -229,7 +219,9 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") + session.install( + "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/owlbot.py b/owlbot.py new file mode 100644 index 0000000000..f4cf08e0a8 --- /dev/null +++ b/owlbot.py @@ -0,0 +1,269 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This script is used to synthesize generated parts of this library."""
+from pathlib import Path
+from typing import List, Optional
+
+import synthtool as s
+from synthtool import gcp
+
+common = gcp.CommonTemplates()
+
+# This library ships clients for 3 different APIs,
+# firestore, firestore_admin and firestore_bundle.
+# firestore_bundle is not versioned
+firestore_default_version = "v1"
+firestore_admin_default_version = "v1"
+
+# This is a customized version of the s.get_staging_dirs() function from synthtool to
+# cater for copying 3 different folders from googleapis-gen
+# which are firestore, firestore/admin and firestore/bundle.
+# Source https://github.com/googleapis/synthtool/blob/master/synthtool/transforms.py#L280
+def get_staging_dirs(
+    default_version: Optional[str] = None, sub_directory: Optional[str] = None
+) -> List[Path]:
+    """Returns the list of directories, one per version, copied from
+    https://github.com/googleapis/googleapis-gen. Will return in lexical sorting
+    order with the exception of the default_version which will be last (if specified).
+
+    Args:
+        default_version (str): the default version of the API. The directory for this version
+            will be the last item in the returned list if specified.
+        sub_directory (str): if a `sub_directory` is provided, only the directories within the
+            specified `sub_directory` will be returned.
+
+    Returns: an empty list if no files were copied.
+    """
+
+    staging = Path("owl-bot-staging")
+
+    if sub_directory:
+        staging /= sub_directory
+
+    if staging.is_dir():
+        # Collect the subdirectories of the staging directory.
+        versions = [v.name for v in staging.iterdir() if v.is_dir()]
+        # Reorder the versions so the default version always comes last.
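+        # Note: placing the default version last means it is the final staging
+        # directory handed to the s.move() calls further below, so its files are
+        # the last ones copied into the library.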
+ versions = [v for v in versions if v != default_version] + versions.sort() + if default_version is not None: + versions += [default_version] + dirs = [staging / v for v in versions] + for dir in dirs: + s._tracked_paths.add(dir) + return dirs + else: + return [] + +def update_fixup_scripts(library): + # Add message for missing 'libcst' dependency + s.replace( + library / "scripts/fixup*.py", + """import libcst as cst""", + """try: + import libcst as cst +except ImportError: + raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.') + + + """, + ) + +for library in get_staging_dirs(default_version=firestore_default_version, sub_directory="firestore"): + s.move(library / f"google/cloud/firestore_{library.name}", excludes=[f"__init__.py"]) + s.move(library / f"tests/", f"tests") + update_fixup_scripts(library) + s.move(library / "scripts") + +for library in get_staging_dirs(default_version=firestore_admin_default_version, sub_directory="firestore_admin"): + s.move(library / f"google/cloud/firestore_admin_{library.name}", excludes=[f"__init__.py"]) + s.move(library / f"tests", f"tests") + update_fixup_scripts(library) + s.move(library / "scripts") + +for library in get_staging_dirs(sub_directory="firestore_bundle"): + s.replace( + library / "google/cloud/bundle/types/bundle.py", + "from google.firestore.v1 import document_pb2 # type: ignore\n" + "from google.firestore.v1 import query_pb2 # type: ignore", + "from google.cloud.firestore_v1.types import document as document_pb2 # type: ignore\n" + "from google.cloud.firestore_v1.types import query as query_pb2 # type: ignore" + ) + + s.replace( + library / "google/cloud/bundle/__init__.py", + "from .types.bundle import BundleMetadata\n" + "from .types.bundle import NamedQuery\n", + "from .types.bundle import BundleMetadata\n" + "from .types.bundle import NamedQuery\n" + "\n" + "from .bundle import FirestoreBundle\n", + ) + + s.replace( + library / "google/cloud/bundle/__init__.py", + "\'BundledQuery\',", + "\"BundledQuery\",\n\"FirestoreBundle\",", + ) + + s.move( + library / f"google/cloud/bundle", + f"google/cloud/firestore_bundle", + ) + s.move(library / f"tests", f"tests") + +s.remove_staging_dirs() + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- +templated_files = common.py_library( + samples=False, # set to True only if there are samples + system_test_python_versions=["3.7"], + unit_test_external_dependencies=["aiounittest"], + system_test_external_dependencies=["pytest-asyncio"], + microgenerator=True, + cov_level=100, +) + +s.move(templated_files) + +s.replace( + "noxfile.py", + """\"--quiet\", + f\"--junitxml=system_\{session.python\}_sponge_log.xml\", + system_test""", + """\"--verbose\", + f\"--junitxml=system_{session.python}_sponge_log.xml\", + system_test""", +) + +# Add pytype support +s.replace( + ".gitignore", + """\ +.pytest_cache +""", + """\ +.pytest_cache +.pytype +""", +) + +s.replace( + ".gitignore", + """\ +pylintrc +pylintrc.test +""", + """\ +pylintrc +pylintrc.test +.make/** +""", +) + +s.replace( + "setup.cfg", + """\ +universal = 1 +""", + """\ +universal = 1 +[pytype] +python_version = 3.8 +inputs = + google/cloud/ +exclude = + tests/ +output = .pytype/ +# Workaround for https://github.com/google/pytype/issues/150 +disable = pyi-error +""", +) + +s.replace( + "noxfile.py", + """\ +BLACK_VERSION = 
"black==19.10b0" +""", + """\ +PYTYPE_VERSION = "pytype==2020.7.24" +BLACK_VERSION = "black==19.10b0" +""", +) + +s.replace( + "noxfile.py", + """\ +@nox.session\(python=DEFAULT_PYTHON_VERSION\) +def lint_setup_py\(session\): +""", + '''\ +@nox.session(python="3.7") +def pytype(session): + """Run pytype + """ + session.install(PYTYPE_VERSION) + session.run("pytype",) +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): +''', +) + +s.replace( + ".coveragerc", + """\ + raise NotImplementedError +omit = +""", + """\ + raise NotImplementedError + # Ignore setuptools-less fallback + except pkg_resources.DistributionNotFound: +omit = +""", +) + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) + +s.replace( + ".kokoro/build.sh", + "# Setup service account credentials.", + """\ +# Setup firestore account credentials +export FIRESTORE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/firebase-credentials.json + +# Setup service account credentials.""", +) + + +# Add a section on updating conformance tests to contributing. +s.replace( + "CONTRIBUTING.rst", + "\nTest Coverage", + """************* +Updating Conformance Tests +************************** + +The firestore client libraries use a shared set of conformance tests, the source of which can be found at https://github.com/googleapis/conformance-tests. + +To update the copy of these conformance tests used by this repository, run the provided Makefile: + + $ make -f Makefile_v1 + +************* +Test Coverage""" +) diff --git a/renovate.json b/renovate.json index f08bc22c9a..c04895563e 100644 --- a/renovate.json +++ b/renovate.json @@ -2,5 +2,8 @@ "extends": [ "config:base", ":preserveSemverRanges" ], - "ignorePaths": [".pre-commit-config.yaml"] + "ignorePaths": [".pre-commit-config.yaml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/scripts/fixup_firestore_admin_v1_keywords.py b/scripts/fixup_firestore_admin_v1_keywords.py index 18985c9241..bd5f8dd368 100644 --- a/scripts/fixup_firestore_admin_v1_keywords.py +++ b/scripts/fixup_firestore_admin_v1_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,16 +14,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import argparse import os - try: import libcst as cst except ImportError: raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.') + import pathlib import sys from typing import (Any, Callable, Dict, List, Sequence, Tuple) @@ -47,16 +45,15 @@ def partition( class firestore_adminCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_index': ('parent', 'index', ), - 'delete_index': ('name', ), - 'export_documents': ('name', 'collection_ids', 'output_uri_prefix', ), - 'get_field': ('name', ), - 'get_index': ('name', ), - 'import_documents': ('name', 'collection_ids', 'input_uri_prefix', ), - 'list_fields': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_indexes': ('parent', 'filter', 'page_size', 'page_token', ), - 'update_field': ('field', 'update_mask', ), - + 'create_index': ('parent', 'index', ), + 'delete_index': ('name', ), + 'export_documents': ('name', 'collection_ids', 'output_uri_prefix', ), + 'get_field': ('name', ), + 'get_index': ('name', ), + 'import_documents': ('name', 'collection_ids', 'input_uri_prefix', ), + 'list_fields': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_indexes': ('parent', 'filter', 'page_size', 'page_token', ), + 'update_field': ('field', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -87,7 +84,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/scripts/fixup_firestore_v1_keywords.py b/scripts/fixup_firestore_v1_keywords.py index 9e3e6fba10..8f71f6285a 100644 --- a/scripts/fixup_firestore_v1_keywords.py +++ b/scripts/fixup_firestore_v1_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,16 +14,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import argparse import os - try: import libcst as cst except ImportError: raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.') + import pathlib import sys from typing import (Any, Callable, Dict, List, Sequence, Tuple) @@ -47,22 +45,21 @@ def partition( class firestoreCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_get_documents': ('database', 'documents', 'mask', 'transaction', 'new_transaction', 'read_time', ), - 'batch_write': ('database', 'writes', 'labels', ), - 'begin_transaction': ('database', 'options', ), - 'commit': ('database', 'writes', 'transaction', ), - 'create_document': ('parent', 'collection_id', 'document', 'document_id', 'mask', ), - 'delete_document': ('name', 'current_document', ), - 'get_document': ('name', 'mask', 'transaction', 'read_time', ), - 'list_collection_ids': ('parent', 'page_size', 'page_token', ), - 'list_documents': ('parent', 'collection_id', 'page_size', 'page_token', 'order_by', 'mask', 'transaction', 'read_time', 'show_missing', ), - 'listen': ('database', 'add_target', 'remove_target', 'labels', ), - 'partition_query': ('parent', 'structured_query', 'partition_count', 'page_token', 'page_size', ), - 'rollback': ('database', 'transaction', ), - 'run_query': ('parent', 'structured_query', 'transaction', 'new_transaction', 'read_time', ), - 'update_document': ('document', 'update_mask', 'mask', 'current_document', ), - 'write': ('database', 'stream_id', 'writes', 'stream_token', 'labels', ), - + 'batch_get_documents': ('database', 'documents', 'mask', 'transaction', 'new_transaction', 'read_time', ), + 'batch_write': ('database', 'writes', 'labels', ), + 'begin_transaction': ('database', 'options', ), + 'commit': ('database', 'writes', 'transaction', ), + 'create_document': ('parent', 'collection_id', 'document', 'document_id', 'mask', ), + 'delete_document': ('name', 'current_document', ), + 'get_document': ('name', 'mask', 'transaction', 'read_time', ), + 'list_collection_ids': ('parent', 'page_size', 'page_token', ), + 'list_documents': ('parent', 'collection_id', 'page_size', 'page_token', 'order_by', 'mask', 'transaction', 'read_time', 'show_missing', ), + 'listen': ('database', 'add_target', 'remove_target', 'labels', ), + 'partition_query': ('parent', 'structured_query', 'partition_count', 'page_token', 'page_size', ), + 'rollback': ('database', 'transaction', ), + 'run_query': ('parent', 'structured_query', 'transaction', 'new_transaction', 'read_time', ), + 'update_document': ('document', 'update_mask', 'mask', 'current_document', ), + 'write': ('database', 'stream_id', 'writes', 'stream_token', 'labels', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -93,7 +90,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/setup.py b/setup.py index 7bf61c02fa..56662a17bd 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-firestore" description = "Google Cloud Firestore API client library" -version = "2.1.1" +version = "2.1.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", diff --git 
a/synth.metadata b/synth.metadata deleted file mode 100644 index ed1bfc8597..0000000000 --- a/synth.metadata +++ /dev/null @@ -1,55 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "git@github.com:googleapis/python-firestore", - "sha": "73346ebb223e773c5fe6c154de1332cb86b02e11" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "fa7915f8d43926de5effb815129a274579fa84df", - "internalRef": "366869955" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "063de45298fbdd88916018ba566c7ecd254b39ae" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "firestore", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - }, - { - "client": { - "source": "googleapis", - "apiName": "firestore_admin", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - }, - { - "client": { - "source": "googleapis", - "apiName": "firestore-bundle", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - } - ] -} \ No newline at end of file diff --git a/synth.py b/synth.py deleted file mode 100644 index 18e5bb9abd..0000000000 --- a/synth.py +++ /dev/null @@ -1,273 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""This script is used to synthesize generated parts of this library.""" -import synthtool as s -from synthtool import gcp - -AUTOSYNTH_MULTIPLE_PRS = True -AUTOSYNTH_MULTIPLE_COMMITS = True - -gapic = gcp.GAPICBazel() -common = gcp.CommonTemplates() -versions = ["v1"] -admin_versions = ["v1"] - - -# ---------------------------------------------------------------------------- -# Generate firestore GAPIC layer -# ---------------------------------------------------------------------------- -for version in versions: - library = gapic.py_library( - service="firestore", - version=version, - bazel_target=f"//google/firestore/{version}:firestore-{version}-py", - ) - - s.move( - library / f"google/cloud/firestore_{version}", - f"google/cloud/firestore_{version}", - excludes=[library / f"google/cloud/firestore_{version}/__init__.py"], - ) - - s.move( - library / f"tests/", - f"tests", - ) - s.move(library / "scripts") - - -# ---------------------------------------------------------------------------- -# Generate firestore admin GAPIC layer -# ---------------------------------------------------------------------------- -for version in admin_versions: - library = gapic.py_library( - service="firestore_admin", - version=version, - bazel_target=f"//google/firestore/admin/{version}:firestore-admin-{version}-py", - ) - s.move( - library / f"google/cloud/firestore_admin_{version}", - f"google/cloud/firestore_admin_{version}", - excludes=[library / f"google/cloud/admin_{version}/__init__.py"], - ) - s.move(library / f"tests", f"tests") - s.move(library / "scripts") - - -# ---------------------------------------------------------------------------- -# Generate firestore bundle GAPIC layer -# ---------------------------------------------------------------------------- -for version in ["v1"]: - library = gapic.py_library( - service="firestore-bundle", - version=version, - proto_path='google/firestore/bundle', - bazel_target=f"//google/firestore/bundle:firestore-bundle-py", - ) - s.move( - library / f"google/cloud/bundle", - f"google/cloud/firestore_bundle", - ) - s.move(library / f"tests", f"tests") - - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = common.py_library( - samples=False, # set to True only if there are samples - unit_test_python_versions=["3.6", "3.7", "3.8"], - system_test_python_versions=["3.7"], - unit_test_external_dependencies=["aiounittest"], - system_test_external_dependencies=["pytest-asyncio"], - microgenerator=True, - cov_level=100, -) - -s.move( - templated_files, -) - -s.replace( - "noxfile.py", - "GOOGLE_APPLICATION_CREDENTIALS", - "FIRESTORE_APPLICATION_CREDENTIALS", -) - -s.replace( - "noxfile.py", - '"--quiet", system_test', - '"--verbose", system_test', -) - -# Add pytype support -s.replace( - ".gitignore", - """\ -.pytest_cache -""", - """\ -.pytest_cache -.pytype -""", -) - -s.replace( - ".gitignore", - """\ -pylintrc -pylintrc.test -""", - """\ -pylintrc -pylintrc.test -.make/** -""", -) - -s.replace( - "setup.cfg", - """\ -universal = 1 -""", - """\ -universal = 1 -[pytype] -python_version = 3.8 -inputs = - google/cloud/ -exclude = - tests/ -output = .pytype/ -# Workaround for https://github.com/google/pytype/issues/150 
-disable = pyi-error -""", -) - -s.replace( - "noxfile.py", - """\ -BLACK_VERSION = "black==19.10b0" -""", - """\ -PYTYPE_VERSION = "pytype==2020.7.24" -BLACK_VERSION = "black==19.10b0" -""", -) - -s.replace( - "noxfile.py", - """\ -@nox.session\(python=DEFAULT_PYTHON_VERSION\) -def lint_setup_py\(session\): -""", - '''\ -@nox.session(python="3.7") -def pytype(session): - """Run pytype - """ - session.install(PYTYPE_VERSION) - session.run("pytype",) -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): -''', -) - - -# Add message for missing 'libcst' dependency -s.replace( - "scripts/fixup*.py", - """\ -import libcst as cst -""", - """\ - -try: - import libcst as cst -except ImportError: - raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.') - - -""", -) - -s.replace( - "google/cloud/firestore_bundle/types/bundle.py", - "from google.firestore.v1 import document_pb2 as gfv_document # type: ignore\n", - "from google.cloud.firestore_v1.types import document as gfv_document\n", -) - -s.replace( - "google/cloud/firestore_bundle/types/bundle.py", - "from google.firestore.v1 import query_pb2 as query # type: ignore\n", - "from google.cloud.firestore_v1.types import query\n", -) - -s.replace( - ".coveragerc", - """\ - raise NotImplementedError -omit = -""", - """\ - raise NotImplementedError - # Ignore setuptools-less fallback - except pkg_resources.DistributionNotFound: -omit = -""", -) - -s.replace( - "google/cloud/firestore_bundle/__init__.py", - "from .types.bundle import NamedQuery\n", - "from .types.bundle import NamedQuery\n\nfrom .bundle import FirestoreBundle\n", -) - -s.replace( - "google/cloud/firestore_bundle/__init__.py", - "\'BundledQuery\',", - "\"BundledQuery\",\n \"FirestoreBundle\",", -) - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) - -s.replace( - ".kokoro/build.sh", - "# Setup service account credentials.", - """\ -# Setup firestore account credentials -export FIRESTORE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/firebase-credentials.json - -# Setup service account credentials.""", -) - - -# Add a section on updating conformance tests to contributing. -s.replace( - "CONTRIBUTING.rst", - "\nTest Coverage", - """************* -Updating Conformance Tests -************************** - -The firestore client libraries use a shared set of conformance tests, the source of which can be found at https://github.com/googleapis/conformance-tests. - -To update the copy of these conformance tests used by this repository, run the provided Makefile: - - $ make -f Makefile_v1 - -************* -Test Coverage""" -) diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index b2e8797d5b..b202f9c210 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -8,3 +8,4 @@ google-api-core==1.22.2 google-cloud-core==1.4.1 proto-plus==1.10.0 +google-auth==1.24.0 # TODO: remove when google-auth>=1.25.0 is required through google-api-core \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py index ab67290952..4de65971c2 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google LLC +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,3 +12,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. +# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index ab67290952..4de65971c2 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google LLC +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,3 +12,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..4de65971c2 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/bundle/__init__.py b/tests/unit/gapic/bundle/__init__.py index 42ffdf2bc4..4de65971c2 100644 --- a/tests/unit/gapic/bundle/__init__.py +++ b/tests/unit/gapic/bundle/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/firestore_admin_v1/__init__.py b/tests/unit/gapic/firestore_admin_v1/__init__.py index 42ffdf2bc4..4de65971c2 100644 --- a/tests/unit/gapic/firestore_admin_v1/__init__.py +++ b/tests/unit/gapic/firestore_admin_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py index b7d6e48dd1..fde454b15f 100644 --- a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py +++ b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,16 +23,16 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.firestore_admin_v1.services.firestore_admin import ( FirestoreAdminAsyncClient, @@ -43,6 +42,12 @@ ) from google.cloud.firestore_admin_v1.services.firestore_admin import pagers from google.cloud.firestore_admin_v1.services.firestore_admin import transports +from google.cloud.firestore_admin_v1.services.firestore_admin.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.firestore_admin_v1.services.firestore_admin.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.firestore_admin_v1.types import field from google.cloud.firestore_admin_v1.types import field as gfa_field from google.cloud.firestore_admin_v1.types import firestore_admin @@ -51,7 +56,31 @@ from google.cloud.firestore_admin_v1.types import operation as gfa_operation from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -102,7 +131,7 @@ def test__get_default_mtls_endpoint(): "client_class", [FirestoreAdminClient, FirestoreAdminAsyncClient,] ) def test_firestore_admin_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -119,7 +148,7 @@ def test_firestore_admin_client_from_service_account_info(client_class): "client_class", [FirestoreAdminClient, FirestoreAdminAsyncClient,] ) def test_firestore_admin_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -172,7 +201,7 @@ def test_firestore_admin_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(FirestoreAdminClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -460,7 +489,7 @@ def test_create_index( transport: str = "grpc", request_type=firestore_admin.CreateIndexRequest ): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -471,13 +500,11 @@ def test_create_index( with mock.patch.object(type(client.transport.create_index), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_index(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateIndexRequest() # Establish that the response is the type that we expect. @@ -492,7 +519,7 @@ def test_create_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -500,7 +527,6 @@ def test_create_index_empty_call(): client.create_index() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateIndexRequest() @@ -509,7 +535,7 @@ async def test_create_index_async( transport: str = "grpc_asyncio", request_type=firestore_admin.CreateIndexRequest ): client = FirestoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -522,13 +548,11 @@ async def test_create_index_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_index(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.CreateIndexRequest() # Establish that the response is the type that we expect. @@ -541,17 +565,17 @@ async def test_create_index_async_from_dict(): def test_create_index_field_headers(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.CreateIndexRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_index), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_index(request) # Establish that the underlying gRPC stub method was called. @@ -566,11 +590,14 @@ def test_create_index_field_headers(): @pytest.mark.asyncio async def test_create_index_field_headers_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.CreateIndexRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -578,7 +605,6 @@ async def test_create_index_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_index(request) # Establish that the underlying gRPC stub method was called. @@ -592,13 +618,12 @@ async def test_create_index_field_headers_async(): def test_create_index_flattened(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_index), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_index( @@ -609,14 +634,12 @@ def test_create_index_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].index == gfa_index.Index(name="name_value") def test_create_index_flattened_error(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -630,7 +653,9 @@ def test_create_index_flattened_error(): @pytest.mark.asyncio async def test_create_index_flattened_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_index), "__call__") as call: @@ -650,15 +675,15 @@ async def test_create_index_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].index == gfa_index.Index(name="name_value") @pytest.mark.asyncio async def test_create_index_flattened_error_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -674,7 +699,7 @@ def test_list_indexes( transport: str = "grpc", request_type=firestore_admin.ListIndexesRequest ): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -687,19 +712,15 @@ def test_list_indexes( call.return_value = firestore_admin.ListIndexesResponse( next_page_token="next_page_token_value", ) - response = client.list_indexes(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListIndexesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListIndexesPager) - assert response.next_page_token == "next_page_token_value" @@ -711,7 +732,7 @@ def test_list_indexes_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -719,7 +740,6 @@ def test_list_indexes_empty_call(): client.list_indexes() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListIndexesRequest() @@ -728,7 +748,7 @@ async def test_list_indexes_async( transport: str = "grpc_asyncio", request_type=firestore_admin.ListIndexesRequest ): client = FirestoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -743,18 +763,15 @@ async def test_list_indexes_async( next_page_token="next_page_token_value", ) ) - response = await client.list_indexes(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListIndexesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListIndexesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -764,17 +781,17 @@ async def test_list_indexes_async_from_dict(): def test_list_indexes_field_headers(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.ListIndexesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: call.return_value = firestore_admin.ListIndexesResponse() - client.list_indexes(request) # Establish that the underlying gRPC stub method was called. @@ -789,11 +806,14 @@ def test_list_indexes_field_headers(): @pytest.mark.asyncio async def test_list_indexes_field_headers_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.ListIndexesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -801,7 +821,6 @@ async def test_list_indexes_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore_admin.ListIndexesResponse() ) - await client.list_indexes(request) # Establish that the underlying gRPC stub method was called. @@ -815,13 +834,12 @@ async def test_list_indexes_field_headers_async(): def test_list_indexes_flattened(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = firestore_admin.ListIndexesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_indexes(parent="parent_value",) @@ -830,12 +848,11 @@ def test_list_indexes_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_indexes_flattened_error(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -847,7 +864,9 @@ def test_list_indexes_flattened_error(): @pytest.mark.asyncio async def test_list_indexes_flattened_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: @@ -865,13 +884,14 @@ async def test_list_indexes_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_indexes_flattened_error_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -882,7 +902,7 @@ async def test_list_indexes_flattened_error_async(): def test_list_indexes_pager(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: @@ -916,7 +936,7 @@ def test_list_indexes_pager(): def test_list_indexes_pages(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: @@ -942,7 +962,7 @@ def test_list_indexes_pages(): @pytest.mark.asyncio async def test_list_indexes_async_pager(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -975,7 +995,7 @@ async def test_list_indexes_async_pager(): @pytest.mark.asyncio async def test_list_indexes_async_pages(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1007,7 +1027,7 @@ def test_get_index( transport: str = "grpc", request_type=firestore_admin.GetIndexRequest ): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1022,23 +1042,17 @@ def test_get_index( query_scope=index.Index.QueryScope.COLLECTION, state=index.Index.State.CREATING, ) - response = client.get_index(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetIndexRequest() # Establish that the response is the type that we expect. - assert isinstance(response, index.Index) - assert response.name == "name_value" - assert response.query_scope == index.Index.QueryScope.COLLECTION - assert response.state == index.Index.State.CREATING @@ -1050,7 +1064,7 @@ def test_get_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1058,7 +1072,6 @@ def test_get_index_empty_call(): client.get_index() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetIndexRequest() @@ -1067,7 +1080,7 @@ async def test_get_index_async( transport: str = "grpc_asyncio", request_type=firestore_admin.GetIndexRequest ): client = FirestoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1084,22 +1097,17 @@ async def test_get_index_async( state=index.Index.State.CREATING, ) ) - response = await client.get_index(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetIndexRequest() # Establish that the response is the type that we expect. assert isinstance(response, index.Index) - assert response.name == "name_value" - assert response.query_scope == index.Index.QueryScope.COLLECTION - assert response.state == index.Index.State.CREATING @@ -1109,17 +1117,17 @@ async def test_get_index_async_from_dict(): def test_get_index_field_headers(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.GetIndexRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_index), "__call__") as call: call.return_value = index.Index() - client.get_index(request) # Establish that the underlying gRPC stub method was called. 
@@ -1134,17 +1142,19 @@ def test_get_index_field_headers(): @pytest.mark.asyncio async def test_get_index_field_headers_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.GetIndexRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_index), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(index.Index()) - await client.get_index(request) # Establish that the underlying gRPC stub method was called. @@ -1158,13 +1168,12 @@ async def test_get_index_field_headers_async(): def test_get_index_flattened(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_index), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = index.Index() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_index(name="name_value",) @@ -1173,12 +1182,11 @@ def test_get_index_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_index_flattened_error(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1190,7 +1198,9 @@ def test_get_index_flattened_error(): @pytest.mark.asyncio async def test_get_index_flattened_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_index), "__call__") as call: @@ -1206,13 +1216,14 @@ async def test_get_index_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_index_flattened_error_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1226,7 +1237,7 @@ def test_delete_index( transport: str = "grpc", request_type=firestore_admin.DeleteIndexRequest ): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1237,13 +1248,11 @@ def test_delete_index( with mock.patch.object(type(client.transport.delete_index), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = None - response = client.delete_index(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteIndexRequest() # Establish that the response is the type that we expect. @@ -1258,7 +1267,7 @@ def test_delete_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1266,7 +1275,6 @@ def test_delete_index_empty_call(): client.delete_index() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteIndexRequest() @@ -1275,7 +1283,7 @@ async def test_delete_index_async( transport: str = "grpc_asyncio", request_type=firestore_admin.DeleteIndexRequest ): client = FirestoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1286,13 +1294,11 @@ async def test_delete_index_async( with mock.patch.object(type(client.transport.delete_index), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_index(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.DeleteIndexRequest() # Establish that the response is the type that we expect. @@ -1305,17 +1311,17 @@ async def test_delete_index_async_from_dict(): def test_delete_index_field_headers(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.DeleteIndexRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_index), "__call__") as call: call.return_value = None - client.delete_index(request) # Establish that the underlying gRPC stub method was called. @@ -1330,17 +1336,19 @@ def test_delete_index_field_headers(): @pytest.mark.asyncio async def test_delete_index_field_headers_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.DeleteIndexRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_index), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_index(request) # Establish that the underlying gRPC stub method was called. 
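The field-header tests above now populate request.name up front; the assertions the diff leaves unchanged then inspect the routing metadata attached to the mocked call. A self-contained sketch of that pattern, assuming the import paths and the x-goog-request-params key used by the generated suite:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.firestore_admin_v1.services.firestore_admin import FirestoreAdminClient
    from google.cloud.firestore_admin_v1.types import firestore_admin

    client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials())

    # Any value that is part of the HTTP/1.1 URI should be sent as a field header.
    request = firestore_admin.DeleteIndexRequest()
    request.name = "name/value"

    with mock.patch.object(type(client.transport.delete_index), "__call__") as call:
        call.return_value = None
        client.delete_index(request)

    # The resource name is expected to travel as x-goog-request-params metadata.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value") in kw["metadata"]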
@@ -1354,13 +1362,12 @@ async def test_delete_index_field_headers_async(): def test_delete_index_flattened(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_index), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_index(name="name_value",) @@ -1369,12 +1376,11 @@ def test_delete_index_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_index_flattened_error(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1386,7 +1392,9 @@ def test_delete_index_flattened_error(): @pytest.mark.asyncio async def test_delete_index_flattened_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_index), "__call__") as call: @@ -1402,13 +1410,14 @@ async def test_delete_index_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_index_flattened_error_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1422,7 +1431,7 @@ def test_get_field( transport: str = "grpc", request_type=firestore_admin.GetFieldRequest ): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1433,19 +1442,15 @@ def test_get_field( with mock.patch.object(type(client.transport.get_field), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = field.Field(name="name_value",) - response = client.get_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetFieldRequest() # Establish that the response is the type that we expect. - assert isinstance(response, field.Field) - assert response.name == "name_value" @@ -1457,7 +1462,7 @@ def test_get_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1465,7 +1470,6 @@ def test_get_field_empty_call(): client.get_field() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetFieldRequest() @@ -1474,7 +1478,7 @@ async def test_get_field_async( transport: str = "grpc_asyncio", request_type=firestore_admin.GetFieldRequest ): client = FirestoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1487,18 +1491,15 @@ async def test_get_field_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( field.Field(name="name_value",) ) - response = await client.get_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.GetFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, field.Field) - assert response.name == "name_value" @@ -1508,17 +1509,17 @@ async def test_get_field_async_from_dict(): def test_get_field_field_headers(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.GetFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_field), "__call__") as call: call.return_value = field.Field() - client.get_field(request) # Establish that the underlying gRPC stub method was called. @@ -1533,17 +1534,19 @@ def test_get_field_field_headers(): @pytest.mark.asyncio async def test_get_field_field_headers_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.GetFieldRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_field), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(field.Field()) - await client.get_field(request) # Establish that the underlying gRPC stub method was called. @@ -1557,13 +1560,12 @@ async def test_get_field_field_headers_async(): def test_get_field_flattened(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_field), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = field.Field() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_field(name="name_value",) @@ -1572,12 +1574,11 @@ def test_get_field_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_field_flattened_error(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1589,7 +1590,9 @@ def test_get_field_flattened_error(): @pytest.mark.asyncio async def test_get_field_flattened_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_field), "__call__") as call: @@ -1605,13 +1608,14 @@ async def test_get_field_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_field_flattened_error_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1625,7 +1629,7 @@ def test_update_field( transport: str = "grpc", request_type=firestore_admin.UpdateFieldRequest ): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1636,13 +1640,11 @@ def test_update_field( with mock.patch.object(type(client.transport.update_field), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateFieldRequest() # Establish that the response is the type that we expect. @@ -1657,7 +1659,7 @@ def test_update_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1665,7 +1667,6 @@ def test_update_field_empty_call(): client.update_field() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateFieldRequest() @@ -1674,7 +1675,7 @@ async def test_update_field_async( transport: str = "grpc_asyncio", request_type=firestore_admin.UpdateFieldRequest ): client = FirestoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1687,13 +1688,11 @@ async def test_update_field_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.UpdateFieldRequest() # Establish that the response is the type that we expect. @@ -1706,17 +1705,17 @@ async def test_update_field_async_from_dict(): def test_update_field_field_headers(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.UpdateFieldRequest() + request.field.name = "field.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_field), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_field(request) # Establish that the underlying gRPC stub method was called. @@ -1731,11 +1730,14 @@ def test_update_field_field_headers(): @pytest.mark.asyncio async def test_update_field_field_headers_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.UpdateFieldRequest() + request.field.name = "field.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1743,7 +1745,6 @@ async def test_update_field_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_field(request) # Establish that the underlying gRPC stub method was called. @@ -1757,13 +1758,12 @@ async def test_update_field_field_headers_async(): def test_update_field_flattened(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_field), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_field(field=gfa_field.Field(name="name_value"),) @@ -1772,12 +1772,11 @@ def test_update_field_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].field == gfa_field.Field(name="name_value") def test_update_field_flattened_error(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1790,7 +1789,9 @@ def test_update_field_flattened_error(): @pytest.mark.asyncio async def test_update_field_flattened_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_field), "__call__") as call: @@ -1808,13 +1809,14 @@ async def test_update_field_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].field == gfa_field.Field(name="name_value") @pytest.mark.asyncio async def test_update_field_flattened_error_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1829,7 +1831,7 @@ def test_list_fields( transport: str = "grpc", request_type=firestore_admin.ListFieldsRequest ): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1842,19 +1844,15 @@ def test_list_fields( call.return_value = firestore_admin.ListFieldsResponse( next_page_token="next_page_token_value", ) - response = client.list_fields(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListFieldsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListFieldsPager) - assert response.next_page_token == "next_page_token_value" @@ -1866,7 +1864,7 @@ def test_list_fields_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
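The *_flattened_error hunks only show the client construction changing; their unchanged bodies verify that supplying both a request object and flattened keyword arguments is rejected. A brief sketch of that check, assuming the same type names used in the surrounding tests:

    import pytest

    from google.auth import credentials as ga_credentials
    from google.cloud.firestore_admin_v1.services.firestore_admin import FirestoreAdminClient
    from google.cloud.firestore_admin_v1.types import field as gfa_field
    from google.cloud.firestore_admin_v1.types import firestore_admin

    client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials())

    # Passing a populated request object together with flattened keyword
    # arguments fails before any RPC is attempted.
    with pytest.raises(ValueError):
        client.update_field(
            firestore_admin.UpdateFieldRequest(),
            field=gfa_field.Field(name="name_value"),
        )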
@@ -1874,7 +1872,6 @@ def test_list_fields_empty_call(): client.list_fields() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListFieldsRequest() @@ -1883,7 +1880,7 @@ async def test_list_fields_async( transport: str = "grpc_asyncio", request_type=firestore_admin.ListFieldsRequest ): client = FirestoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1896,18 +1893,15 @@ async def test_list_fields_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore_admin.ListFieldsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_fields(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ListFieldsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListFieldsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1917,17 +1911,17 @@ async def test_list_fields_async_from_dict(): def test_list_fields_field_headers(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.ListFieldsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_fields), "__call__") as call: call.return_value = firestore_admin.ListFieldsResponse() - client.list_fields(request) # Establish that the underlying gRPC stub method was called. @@ -1942,11 +1936,14 @@ def test_list_fields_field_headers(): @pytest.mark.asyncio async def test_list_fields_field_headers_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.ListFieldsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1954,7 +1951,6 @@ async def test_list_fields_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore_admin.ListFieldsResponse() ) - await client.list_fields(request) # Establish that the underlying gRPC stub method was called. @@ -1968,13 +1964,12 @@ async def test_list_fields_field_headers_async(): def test_list_fields_flattened(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_fields), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = firestore_admin.ListFieldsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_fields(parent="parent_value",) @@ -1983,12 +1978,11 @@ def test_list_fields_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_fields_flattened_error(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2000,7 +1994,9 @@ def test_list_fields_flattened_error(): @pytest.mark.asyncio async def test_list_fields_flattened_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_fields), "__call__") as call: @@ -2018,13 +2014,14 @@ async def test_list_fields_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_fields_flattened_error_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2035,7 +2032,7 @@ async def test_list_fields_flattened_error_async(): def test_list_fields_pager(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_fields), "__call__") as call: @@ -2067,7 +2064,7 @@ def test_list_fields_pager(): def test_list_fields_pages(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_fields), "__call__") as call: @@ -2091,7 +2088,7 @@ def test_list_fields_pages(): @pytest.mark.asyncio async def test_list_fields_async_pager(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2122,7 +2119,7 @@ async def test_list_fields_async_pager(): @pytest.mark.asyncio async def test_list_fields_async_pages(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAdminAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2152,7 +2149,7 @@ def test_export_documents( transport: str = "grpc", request_type=firestore_admin.ExportDocumentsRequest ): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2163,13 +2160,11 @@ def test_export_documents( with mock.patch.object(type(client.transport.export_documents), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.export_documents(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ExportDocumentsRequest() # Establish that the response is the type that we expect. @@ -2184,7 +2179,7 @@ def test_export_documents_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2192,7 +2187,6 @@ def test_export_documents_empty_call(): client.export_documents() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ExportDocumentsRequest() @@ -2201,7 +2195,7 @@ async def test_export_documents_async( transport: str = "grpc_asyncio", request_type=firestore_admin.ExportDocumentsRequest ): client = FirestoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2214,13 +2208,11 @@ async def test_export_documents_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.export_documents(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ExportDocumentsRequest() # Establish that the response is the type that we expect. @@ -2233,17 +2225,17 @@ async def test_export_documents_async_from_dict(): def test_export_documents_field_headers(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.ExportDocumentsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_documents), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.export_documents(request) # Establish that the underlying gRPC stub method was called. @@ -2258,11 +2250,14 @@ def test_export_documents_field_headers(): @pytest.mark.asyncio async def test_export_documents_field_headers_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.ExportDocumentsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2270,7 +2265,6 @@ async def test_export_documents_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.export_documents(request) # Establish that the underlying gRPC stub method was called. 
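The export_documents and import_documents hunks mock the stub with a bare operations_pb2.Operation; the type assertion the diff elides is expected to confirm the client wraps that proto in an api_core operation future. A sketch under that assumption:

    from unittest import mock

    from google.api_core import future
    from google.auth import credentials as ga_credentials
    from google.cloud.firestore_admin_v1.services.firestore_admin import FirestoreAdminClient
    from google.cloud.firestore_admin_v1.types import firestore_admin
    from google.longrunning import operations_pb2

    client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials())

    with mock.patch.object(type(client.transport.export_documents), "__call__") as call:
        # The stub hands back a raw long-running Operation proto...
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.export_documents(firestore_admin.ExportDocumentsRequest())

    # ...which the client surfaces as an operation future (assumption: the suite
    # checks this via future.Future, as in other generated LRO tests).
    assert isinstance(response, future.Future)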
@@ -2284,13 +2278,12 @@ async def test_export_documents_field_headers_async(): def test_export_documents_flattened(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_documents), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.export_documents(name="name_value",) @@ -2299,12 +2292,11 @@ def test_export_documents_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_export_documents_flattened_error(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2316,7 +2308,9 @@ def test_export_documents_flattened_error(): @pytest.mark.asyncio async def test_export_documents_flattened_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_documents), "__call__") as call: @@ -2334,13 +2328,14 @@ async def test_export_documents_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_export_documents_flattened_error_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2354,7 +2349,7 @@ def test_import_documents( transport: str = "grpc", request_type=firestore_admin.ImportDocumentsRequest ): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2365,13 +2360,11 @@ def test_import_documents( with mock.patch.object(type(client.transport.import_documents), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.import_documents(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ImportDocumentsRequest() # Establish that the response is the type that we expect. @@ -2386,7 +2379,7 @@ def test_import_documents_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2394,7 +2387,6 @@ def test_import_documents_empty_call(): client.import_documents() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ImportDocumentsRequest() @@ -2403,7 +2395,7 @@ async def test_import_documents_async( transport: str = "grpc_asyncio", request_type=firestore_admin.ImportDocumentsRequest ): client = FirestoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2416,13 +2408,11 @@ async def test_import_documents_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.import_documents(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore_admin.ImportDocumentsRequest() # Establish that the response is the type that we expect. @@ -2435,17 +2425,17 @@ async def test_import_documents_async_from_dict(): def test_import_documents_field_headers(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.ImportDocumentsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_documents), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.import_documents(request) # Establish that the underlying gRPC stub method was called. @@ -2460,11 +2450,14 @@ def test_import_documents_field_headers(): @pytest.mark.asyncio async def test_import_documents_field_headers_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore_admin.ImportDocumentsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2472,7 +2465,6 @@ async def test_import_documents_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.import_documents(request) # Establish that the underlying gRPC stub method was called. @@ -2486,13 +2478,12 @@ async def test_import_documents_field_headers_async(): def test_import_documents_flattened(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_documents), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.import_documents(name="name_value",) @@ -2501,12 +2492,11 @@ def test_import_documents_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_import_documents_flattened_error(): - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2518,7 +2508,9 @@ def test_import_documents_flattened_error(): @pytest.mark.asyncio async def test_import_documents_flattened_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_documents), "__call__") as call: @@ -2536,13 +2528,14 @@ async def test_import_documents_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_import_documents_flattened_error_async(): - client = FirestoreAdminAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2555,16 +2548,16 @@ async def test_import_documents_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.FirestoreAdminGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.FirestoreAdminGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = FirestoreAdminClient( @@ -2574,7 +2567,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.FirestoreAdminGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = FirestoreAdminClient( @@ -2585,7 +2578,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.FirestoreAdminGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = FirestoreAdminClient(transport=transport) assert client.transport is transport @@ -2594,13 +2587,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.FirestoreAdminGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.FirestoreAdminGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2615,23 +2608,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = FirestoreAdminClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAdminClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.FirestoreAdminGrpcTransport,) def test_firestore_admin_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.FirestoreAdminTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -2643,7 +2636,7 @@ def test_firestore_admin_base_transport(): ) as Transport: Transport.return_value = None transport = transports.FirestoreAdminTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2669,15 +2662,40 @@ def test_firestore_admin_base_transport(): transport.operations_client +@requires_google_auth_gte_1_25_0 def test_firestore_admin_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FirestoreAdminTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_firestore_admin_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = 
None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.FirestoreAdminTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2693,19 +2711,36 @@ def test_firestore_admin_base_transport_with_credentials_file(): def test_firestore_admin_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.FirestoreAdminTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_firestore_admin_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + FirestoreAdminClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_firestore_admin_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) FirestoreAdminClient() adc.assert_called_once_with( scopes=( @@ -2716,14 +2751,44 @@ def test_firestore_admin_auth_adc(): ) -def test_firestore_admin_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.FirestoreAdminGrpcTransport, + transports.FirestoreAdminGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_firestore_admin_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.FirestoreAdminGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FirestoreAdminGrpcTransport, + transports.FirestoreAdminGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_firestore_admin_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -2733,6 +2798,121 @@ def test_firestore_admin_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.FirestoreAdminGrpcTransport, grpc_helpers), + (transports.FirestoreAdminGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_firestore_admin_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "firestore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + scopes=["1", "2"], + default_host="firestore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.FirestoreAdminGrpcTransport, grpc_helpers), + (transports.FirestoreAdminGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_firestore_admin_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "firestore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.FirestoreAdminGrpcTransport, grpc_helpers), + (transports.FirestoreAdminGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_firestore_admin_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "firestore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -2741,7 +2921,7 @@ def test_firestore_admin_transport_auth_adc(): ], ) def test_firestore_admin_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2783,7 +2963,7 @@ def test_firestore_admin_grpc_transport_client_cert_source_for_mtls(transport_cl def test_firestore_admin_host_no_port(): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="firestore.googleapis.com" ), @@ -2793,7 +2973,7 @@ def test_firestore_admin_host_no_port(): def test_firestore_admin_host_with_port(): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="firestore.googleapis.com:8000" ), @@ -2849,9 +3029,9 @@ def test_firestore_admin_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2933,7 +3113,7 @@ def test_firestore_admin_transport_channel_mtls_with_adc(transport_class): def test_firestore_admin_grpc_lro_client(): client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2946,7 +3126,7 @@ def test_firestore_admin_grpc_lro_client(): def test_firestore_admin_grpc_lro_async_client(): client = FirestoreAdminAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2961,7 +3141,6 @@ def test_collection_group_path(): project = "squid" database = "clam" collection = "whelk" - expected = "projects/{project}/databases/{database}/collectionGroups/{collection}".format( project=project, database=database, collection=collection, ) @@ -2985,7 +3164,6 @@ def test_parse_collection_group_path(): def test_database_path(): project = "cuttlefish" database = "mussel" - expected = "projects/{project}/databases/{database}".format( project=project, database=database, ) @@ -3010,7 +3188,6 @@ def test_field_path(): database = "abalone" collection = "squid" field = "clam" - expected = 
"projects/{project}/databases/{database}/collectionGroups/{collection}/fields/{field}".format( project=project, database=database, collection=collection, field=field, ) @@ -3037,7 +3214,6 @@ def test_index_path(): database = "mussel" collection = "winkle" index = "nautilus" - expected = "projects/{project}/databases/{database}/collectionGroups/{collection}/indexes/{index}".format( project=project, database=database, collection=collection, index=index, ) @@ -3061,7 +3237,6 @@ def test_parse_index_path(): def test_common_billing_account_path(): billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3082,7 +3257,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "oyster" - expected = "folders/{folder}".format(folder=folder,) actual = FirestoreAdminClient.common_folder_path(folder) assert expected == actual @@ -3101,7 +3275,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization,) actual = FirestoreAdminClient.common_organization_path(organization) assert expected == actual @@ -3120,7 +3293,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "winkle" - expected = "projects/{project}".format(project=project,) actual = FirestoreAdminClient.common_project_path(project) assert expected == actual @@ -3140,7 +3312,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "scallop" location = "abalone" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -3167,7 +3338,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.FirestoreAdminTransport, "_prep_wrapped_messages" ) as prep: client = FirestoreAdminClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3176,6 +3347,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = FirestoreAdminClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/firestore_v1/__init__.py b/tests/unit/gapic/firestore_v1/__init__.py index 42ffdf2bc4..4de65971c2 100644 --- a/tests/unit/gapic/firestore_v1/__init__.py +++ b/tests/unit/gapic/firestore_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/firestore_v1/test_firestore.py b/tests/unit/gapic/firestore_v1/test_firestore.py index 2ff7e01f1c..f1ef4155cf 100644 --- a/tests/unit/gapic/firestore_v1/test_firestore.py +++ b/tests/unit/gapic/firestore_v1/test_firestore.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,18 +23,24 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.firestore_v1.services.firestore import FirestoreAsyncClient from google.cloud.firestore_v1.services.firestore import FirestoreClient from google.cloud.firestore_v1.services.firestore import pagers from google.cloud.firestore_v1.services.firestore import transports +from google.cloud.firestore_v1.services.firestore.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.firestore_v1.services.firestore.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.firestore_v1.types import common from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document @@ -43,11 +48,35 @@ from google.cloud.firestore_v1.types import query from google.cloud.firestore_v1.types import write as gf_write from google.oauth2 import service_account -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.protobuf import wrappers_pb2 as wrappers # type: ignore -from google.rpc import status_pb2 as status # type: ignore -from google.type import latlng_pb2 as latlng # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import latlng_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -91,7 +120,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class", [FirestoreClient, FirestoreAsyncClient,]) def test_firestore_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -106,7 +135,7 @@ def test_firestore_client_from_service_account_info(client_class): @pytest.mark.parametrize("client_class", [FirestoreClient, FirestoreAsyncClient,]) def test_firestore_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -155,7 +184,7 @@ def test_firestore_client_get_transport_class(): def test_firestore_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. with mock.patch.object(FirestoreClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -439,7 +468,7 @@ def test_get_document( transport: str = "grpc", request_type=firestore.GetDocumentRequest ): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -450,19 +479,15 @@ def test_get_document( with mock.patch.object(type(client.transport.get_document), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = document.Document(name="name_value",) - response = client.get_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.GetDocumentRequest() # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == "name_value" @@ -474,7 +499,7 @@ def test_get_document_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -482,7 +507,6 @@ def test_get_document_empty_call(): client.get_document() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.GetDocumentRequest() @@ -491,7 +515,7 @@ async def test_get_document_async( transport: str = "grpc_asyncio", request_type=firestore.GetDocumentRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -504,18 +528,15 @@ async def test_get_document_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( document.Document(name="name_value",) ) - response = await client.get_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.GetDocumentRequest() # Establish that the response is the type that we expect. assert isinstance(response, document.Document) - assert response.name == "name_value" @@ -525,17 +546,17 @@ async def test_get_document_async_from_dict(): def test_get_document_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.GetDocumentRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_document), "__call__") as call: call.return_value = document.Document() - client.get_document(request) # Establish that the underlying gRPC stub method was called. @@ -550,17 +571,17 @@ def test_get_document_field_headers(): @pytest.mark.asyncio async def test_get_document_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.GetDocumentRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_document), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) - await client.get_document(request) # Establish that the underlying gRPC stub method was called. @@ -577,7 +598,7 @@ def test_list_documents( transport: str = "grpc", request_type=firestore.ListDocumentsRequest ): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -590,19 +611,15 @@ def test_list_documents( call.return_value = firestore.ListDocumentsResponse( next_page_token="next_page_token_value", ) - response = client.list_documents(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListDocumentsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" @@ -614,7 +631,7 @@ def test_list_documents_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -622,7 +639,6 @@ def test_list_documents_empty_call(): client.list_documents() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListDocumentsRequest() @@ -631,7 +647,7 @@ async def test_list_documents_async( transport: str = "grpc_asyncio", request_type=firestore.ListDocumentsRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -644,18 +660,15 @@ async def test_list_documents_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore.ListDocumentsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_documents(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListDocumentsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDocumentsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -665,17 +678,17 @@ async def test_list_documents_async_from_dict(): def test_list_documents_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.ListDocumentsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_documents), "__call__") as call: call.return_value = firestore.ListDocumentsResponse() - client.list_documents(request) # Establish that the underlying gRPC stub method was called. @@ -690,11 +703,12 @@ def test_list_documents_field_headers(): @pytest.mark.asyncio async def test_list_documents_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.ListDocumentsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -702,7 +716,6 @@ async def test_list_documents_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore.ListDocumentsResponse() ) - await client.list_documents(request) # Establish that the underlying gRPC stub method was called. 
@@ -716,7 +729,7 @@ async def test_list_documents_field_headers_async(): def test_list_documents_pager(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_documents), "__call__") as call: @@ -754,7 +767,7 @@ def test_list_documents_pager(): def test_list_documents_pages(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_documents), "__call__") as call: @@ -784,7 +797,7 @@ def test_list_documents_pages(): @pytest.mark.asyncio async def test_list_documents_async_pager(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -821,7 +834,7 @@ async def test_list_documents_async_pager(): @pytest.mark.asyncio async def test_list_documents_async_pages(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -857,7 +870,7 @@ def test_update_document( transport: str = "grpc", request_type=firestore.UpdateDocumentRequest ): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -868,19 +881,15 @@ def test_update_document( with mock.patch.object(type(client.transport.update_document), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = gf_document.Document(name="name_value",) - response = client.update_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.UpdateDocumentRequest() # Establish that the response is the type that we expect. - assert isinstance(response, gf_document.Document) - assert response.name == "name_value" @@ -892,7 +901,7 @@ def test_update_document_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -900,7 +909,6 @@ def test_update_document_empty_call(): client.update_document() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.UpdateDocumentRequest() @@ -909,7 +917,7 @@ async def test_update_document_async( transport: str = "grpc_asyncio", request_type=firestore.UpdateDocumentRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -922,18 +930,15 @@ async def test_update_document_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( gf_document.Document(name="name_value",) ) - response = await client.update_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.UpdateDocumentRequest() # Establish that the response is the type that we expect. assert isinstance(response, gf_document.Document) - assert response.name == "name_value" @@ -943,17 +948,17 @@ async def test_update_document_async_from_dict(): def test_update_document_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.UpdateDocumentRequest() + request.document.name = "document.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_document), "__call__") as call: call.return_value = gf_document.Document() - client.update_document(request) # Establish that the underlying gRPC stub method was called. @@ -970,11 +975,12 @@ def test_update_document_field_headers(): @pytest.mark.asyncio async def test_update_document_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.UpdateDocumentRequest() + request.document.name = "document.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -982,7 +988,6 @@ async def test_update_document_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( gf_document.Document() ) - await client.update_document(request) # Establish that the underlying gRPC stub method was called. @@ -998,13 +1003,12 @@ async def test_update_document_field_headers_async(): def test_update_document_flattened(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_document), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = gf_document.Document() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_document( @@ -1016,16 +1020,14 @@ def test_update_document_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == gf_document.Document(name="name_value") - assert args[0].update_mask == common.DocumentMask( field_paths=["field_paths_value"] ) def test_update_document_flattened_error(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1039,7 +1041,7 @@ def test_update_document_flattened_error(): @pytest.mark.asyncio async def test_update_document_flattened_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_document), "__call__") as call: @@ -1060,9 +1062,7 @@ async def test_update_document_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == gf_document.Document(name="name_value") - assert args[0].update_mask == common.DocumentMask( field_paths=["field_paths_value"] ) @@ -1070,7 +1070,7 @@ async def test_update_document_flattened_async(): @pytest.mark.asyncio async def test_update_document_flattened_error_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1086,7 +1086,7 @@ def test_delete_document( transport: str = "grpc", request_type=firestore.DeleteDocumentRequest ): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1097,13 +1097,11 @@ def test_delete_document( with mock.patch.object(type(client.transport.delete_document), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.DeleteDocumentRequest() # Establish that the response is the type that we expect. @@ -1118,7 +1116,7 @@ def test_delete_document_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1126,7 +1124,6 @@ def test_delete_document_empty_call(): client.delete_document() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.DeleteDocumentRequest() @@ -1135,7 +1132,7 @@ async def test_delete_document_async( transport: str = "grpc_asyncio", request_type=firestore.DeleteDocumentRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1146,13 +1143,11 @@ async def test_delete_document_async( with mock.patch.object(type(client.transport.delete_document), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.DeleteDocumentRequest() # Establish that the response is the type that we expect. @@ -1165,17 +1160,17 @@ async def test_delete_document_async_from_dict(): def test_delete_document_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.DeleteDocumentRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_document), "__call__") as call: call.return_value = None - client.delete_document(request) # Establish that the underlying gRPC stub method was called. @@ -1190,17 +1185,17 @@ def test_delete_document_field_headers(): @pytest.mark.asyncio async def test_delete_document_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.DeleteDocumentRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_document), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_document(request) # Establish that the underlying gRPC stub method was called. @@ -1214,13 +1209,12 @@ async def test_delete_document_field_headers_async(): def test_delete_document_flattened(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_document), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_document(name="name_value",) @@ -1229,12 +1223,11 @@ def test_delete_document_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_document_flattened_error(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1246,7 +1239,7 @@ def test_delete_document_flattened_error(): @pytest.mark.asyncio async def test_delete_document_flattened_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_document), "__call__") as call: @@ -1262,13 +1255,12 @@ async def test_delete_document_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_document_flattened_error_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1282,7 +1274,7 @@ def test_batch_get_documents( transport: str = "grpc", request_type=firestore.BatchGetDocumentsRequest ): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1295,13 +1287,11 @@ def test_batch_get_documents( ) as call: # Designate an appropriate return value for the call. call.return_value = iter([firestore.BatchGetDocumentsResponse()]) - response = client.batch_get_documents(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchGetDocumentsRequest() # Establish that the response is the type that we expect. @@ -1317,7 +1307,7 @@ def test_batch_get_documents_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1327,7 +1317,6 @@ def test_batch_get_documents_empty_call(): client.batch_get_documents() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchGetDocumentsRequest() @@ -1336,7 +1325,7 @@ async def test_batch_get_documents_async( transport: str = "grpc_asyncio", request_type=firestore.BatchGetDocumentsRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1352,13 +1341,11 @@ async def test_batch_get_documents_async( call.return_value.read = mock.AsyncMock( side_effect=[firestore.BatchGetDocumentsResponse()] ) - response = await client.batch_get_documents(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchGetDocumentsRequest() # Establish that the response is the type that we expect. @@ -1372,11 +1359,12 @@ async def test_batch_get_documents_async_from_dict(): def test_batch_get_documents_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.BatchGetDocumentsRequest() + request.database = "database/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1384,7 +1372,6 @@ def test_batch_get_documents_field_headers(): type(client.transport.batch_get_documents), "__call__" ) as call: call.return_value = iter([firestore.BatchGetDocumentsResponse()]) - client.batch_get_documents(request) # Establish that the underlying gRPC stub method was called. @@ -1399,11 +1386,12 @@ def test_batch_get_documents_field_headers(): @pytest.mark.asyncio async def test_batch_get_documents_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.BatchGetDocumentsRequest() + request.database = "database/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1414,7 +1402,6 @@ async def test_batch_get_documents_field_headers_async(): call.return_value.read = mock.AsyncMock( side_effect=[firestore.BatchGetDocumentsResponse()] ) - await client.batch_get_documents(request) # Establish that the underlying gRPC stub method was called. @@ -1431,7 +1418,7 @@ def test_begin_transaction( transport: str = "grpc", request_type=firestore.BeginTransactionRequest ): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1446,19 +1433,15 @@ def test_begin_transaction( call.return_value = firestore.BeginTransactionResponse( transaction=b"transaction_blob", ) - response = client.begin_transaction(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BeginTransactionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BeginTransactionResponse) - assert response.transaction == b"transaction_blob" @@ -1470,7 +1453,7 @@ def test_begin_transaction_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1480,7 +1463,6 @@ def test_begin_transaction_empty_call(): client.begin_transaction() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BeginTransactionRequest() @@ -1489,7 +1471,7 @@ async def test_begin_transaction_async( transport: str = "grpc_asyncio", request_type=firestore.BeginTransactionRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1504,18 +1486,15 @@ async def test_begin_transaction_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore.BeginTransactionResponse(transaction=b"transaction_blob",) ) - response = await client.begin_transaction(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BeginTransactionRequest() # Establish that the response is the type that we expect. assert isinstance(response, firestore.BeginTransactionResponse) - assert response.transaction == b"transaction_blob" @@ -1525,11 +1504,12 @@ async def test_begin_transaction_async_from_dict(): def test_begin_transaction_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.BeginTransactionRequest() + request.database = "database/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1537,7 +1517,6 @@ def test_begin_transaction_field_headers(): type(client.transport.begin_transaction), "__call__" ) as call: call.return_value = firestore.BeginTransactionResponse() - client.begin_transaction(request) # Establish that the underlying gRPC stub method was called. @@ -1552,11 +1531,12 @@ def test_begin_transaction_field_headers(): @pytest.mark.asyncio async def test_begin_transaction_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.BeginTransactionRequest() + request.database = "database/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1566,7 +1546,6 @@ async def test_begin_transaction_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore.BeginTransactionResponse() ) - await client.begin_transaction(request) # Establish that the underlying gRPC stub method was called. @@ -1580,7 +1559,7 @@ async def test_begin_transaction_field_headers_async(): def test_begin_transaction_flattened(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1588,7 +1567,6 @@ def test_begin_transaction_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = firestore.BeginTransactionResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.begin_transaction(database="database_value",) @@ -1597,12 +1575,11 @@ def test_begin_transaction_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].database == "database_value" def test_begin_transaction_flattened_error(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1614,7 +1591,7 @@ def test_begin_transaction_flattened_error(): @pytest.mark.asyncio async def test_begin_transaction_flattened_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1634,13 +1611,12 @@ async def test_begin_transaction_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].database == "database_value" @pytest.mark.asyncio async def test_begin_transaction_flattened_error_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1652,7 +1628,7 @@ async def test_begin_transaction_flattened_error_async(): def test_commit(transport: str = "grpc", request_type=firestore.CommitRequest): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1663,17 +1639,14 @@ def test_commit(transport: str = "grpc", request_type=firestore.CommitRequest): with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = firestore.CommitResponse() - response = client.commit(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CommitRequest() # Establish that the response is the type that we expect. - assert isinstance(response, firestore.CommitResponse) @@ -1685,7 +1658,7 @@ def test_commit_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1693,7 +1666,6 @@ def test_commit_empty_call(): client.commit() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CommitRequest() @@ -1702,7 +1674,7 @@ async def test_commit_async( transport: str = "grpc_asyncio", request_type=firestore.CommitRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1715,13 +1687,11 @@ async def test_commit_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore.CommitResponse() ) - response = await client.commit(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CommitRequest() # Establish that the response is the type that we expect. @@ -1734,17 +1704,17 @@ async def test_commit_async_from_dict(): def test_commit_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.CommitRequest() + request.database = "database/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: call.return_value = firestore.CommitResponse() - client.commit(request) # Establish that the underlying gRPC stub method was called. @@ -1759,11 +1729,12 @@ def test_commit_field_headers(): @pytest.mark.asyncio async def test_commit_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.CommitRequest() + request.database = "database/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1771,7 +1742,6 @@ async def test_commit_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore.CommitResponse() ) - await client.commit(request) # Establish that the underlying gRPC stub method was called. @@ -1785,13 +1755,12 @@ async def test_commit_field_headers_async(): def test_commit_flattened(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = firestore.CommitResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.commit( @@ -1803,16 +1772,14 @@ def test_commit_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].database == "database_value" - assert args[0].writes == [ gf_write.Write(update=document.Document(name="name_value")) ] def test_commit_flattened_error(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1826,7 +1793,7 @@ def test_commit_flattened_error(): @pytest.mark.asyncio async def test_commit_flattened_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: @@ -1847,9 +1814,7 @@ async def test_commit_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].database == "database_value" - assert args[0].writes == [ gf_write.Write(update=document.Document(name="name_value")) ] @@ -1857,7 +1822,7 @@ async def test_commit_flattened_async(): @pytest.mark.asyncio async def test_commit_flattened_error_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1871,7 +1836,7 @@ async def test_commit_flattened_error_async(): def test_rollback(transport: str = "grpc", request_type=firestore.RollbackRequest): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1882,13 +1847,11 @@ def test_rollback(transport: str = "grpc", request_type=firestore.RollbackReques with mock.patch.object(type(client.transport.rollback), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.rollback(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RollbackRequest() # Establish that the response is the type that we expect. @@ -1903,7 +1866,7 @@ def test_rollback_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1911,7 +1874,6 @@ def test_rollback_empty_call(): client.rollback() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RollbackRequest() @@ -1920,7 +1882,7 @@ async def test_rollback_async( transport: str = "grpc_asyncio", request_type=firestore.RollbackRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1931,13 +1893,11 @@ async def test_rollback_async( with mock.patch.object(type(client.transport.rollback), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.rollback(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RollbackRequest() # Establish that the response is the type that we expect. @@ -1950,17 +1910,17 @@ async def test_rollback_async_from_dict(): def test_rollback_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.RollbackRequest() + request.database = "database/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: call.return_value = None - client.rollback(request) # Establish that the underlying gRPC stub method was called. @@ -1975,17 +1935,17 @@ def test_rollback_field_headers(): @pytest.mark.asyncio async def test_rollback_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.RollbackRequest() + request.database = "database/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.rollback(request) # Establish that the underlying gRPC stub method was called. @@ -1999,13 +1959,12 @@ async def test_rollback_field_headers_async(): def test_rollback_flattened(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.rollback( @@ -2016,14 +1975,12 @@ def test_rollback_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].database == "database_value" - assert args[0].transaction == b"transaction_blob" def test_rollback_flattened_error(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2037,7 +1994,7 @@ def test_rollback_flattened_error(): @pytest.mark.asyncio async def test_rollback_flattened_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback), "__call__") as call: @@ -2055,15 +2012,13 @@ async def test_rollback_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].database == "database_value" - assert args[0].transaction == b"transaction_blob" @pytest.mark.asyncio async def test_rollback_flattened_error_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2077,7 +2032,7 @@ async def test_rollback_flattened_error_async(): def test_run_query(transport: str = "grpc", request_type=firestore.RunQueryRequest): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2088,13 +2043,11 @@ def test_run_query(transport: str = "grpc", request_type=firestore.RunQueryReque with mock.patch.object(type(client.transport.run_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = iter([firestore.RunQueryResponse()]) - response = client.run_query(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunQueryRequest() # Establish that the response is the type that we expect. @@ -2110,7 +2063,7 @@ def test_run_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2118,7 +2071,6 @@ def test_run_query_empty_call(): client.run_query() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunQueryRequest() @@ -2127,7 +2079,7 @@ async def test_run_query_async( transport: str = "grpc_asyncio", request_type=firestore.RunQueryRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2141,13 +2093,11 @@ async def test_run_query_async( call.return_value.read = mock.AsyncMock( side_effect=[firestore.RunQueryResponse()] ) - response = await client.run_query(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.RunQueryRequest() # Establish that the response is the type that we expect. @@ -2161,17 +2111,17 @@ async def test_run_query_async_from_dict(): def test_run_query_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.RunQueryRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.run_query), "__call__") as call: call.return_value = iter([firestore.RunQueryResponse()]) - client.run_query(request) # Establish that the underlying gRPC stub method was called. @@ -2186,11 +2136,12 @@ def test_run_query_field_headers(): @pytest.mark.asyncio async def test_run_query_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.RunQueryRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2199,7 +2150,6 @@ async def test_run_query_field_headers_async(): call.return_value.read = mock.AsyncMock( side_effect=[firestore.RunQueryResponse()] ) - await client.run_query(request) # Establish that the underlying gRPC stub method was called. @@ -2216,7 +2166,7 @@ def test_partition_query( transport: str = "grpc", request_type=firestore.PartitionQueryRequest ): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2229,19 +2179,15 @@ def test_partition_query( call.return_value = firestore.PartitionQueryResponse( next_page_token="next_page_token_value", ) - response = client.partition_query(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.PartitionQueryRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.PartitionQueryPager) - assert response.next_page_token == "next_page_token_value" @@ -2253,7 +2199,7 @@ def test_partition_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2261,7 +2207,6 @@ def test_partition_query_empty_call(): client.partition_query() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.PartitionQueryRequest() @@ -2270,7 +2215,7 @@ async def test_partition_query_async( transport: str = "grpc_asyncio", request_type=firestore.PartitionQueryRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2283,18 +2228,15 @@ async def test_partition_query_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore.PartitionQueryResponse(next_page_token="next_page_token_value",) ) - response = await client.partition_query(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.PartitionQueryRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.PartitionQueryAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -2304,17 +2246,17 @@ async def test_partition_query_async_from_dict(): def test_partition_query_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.PartitionQueryRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.partition_query), "__call__") as call: call.return_value = firestore.PartitionQueryResponse() - client.partition_query(request) # Establish that the underlying gRPC stub method was called. @@ -2329,11 +2271,12 @@ def test_partition_query_field_headers(): @pytest.mark.asyncio async def test_partition_query_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.PartitionQueryRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2341,7 +2284,6 @@ async def test_partition_query_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore.PartitionQueryResponse() ) - await client.partition_query(request) # Establish that the underlying gRPC stub method was called. @@ -2355,7 +2297,7 @@ async def test_partition_query_field_headers_async(): def test_partition_query_pager(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.partition_query), "__call__") as call: @@ -2389,7 +2331,7 @@ def test_partition_query_pager(): def test_partition_query_pages(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.partition_query), "__call__") as call: @@ -2415,7 +2357,7 @@ def test_partition_query_pages(): @pytest.mark.asyncio async def test_partition_query_async_pager(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2448,7 +2390,7 @@ async def test_partition_query_async_pager(): @pytest.mark.asyncio async def test_partition_query_async_pages(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2478,26 +2420,23 @@ async def test_partition_query_async_pages(): def test_write(transport: str = "grpc", request_type=firestore.WriteRequest): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() - requests = [request] # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.write), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = iter([firestore.WriteResponse()]) - response = client.write(iter(requests)) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert next(args[0]) == request # Establish that the response is the type that we expect. @@ -2514,13 +2453,12 @@ async def test_write_async( transport: str = "grpc_asyncio", request_type=firestore.WriteRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() - requests = [request] # Mock the actual call within the gRPC stub, and fake the request. @@ -2528,13 +2466,11 @@ async def test_write_async( # Designate an appropriate return value for the call. call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) call.return_value.read = mock.AsyncMock(side_effect=[firestore.WriteResponse()]) - response = await client.write(iter(requests)) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert next(args[0]) == request # Establish that the response is the type that we expect. 
@@ -2549,26 +2485,23 @@ async def test_write_async_from_dict(): def test_listen(transport: str = "grpc", request_type=firestore.ListenRequest): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() - requests = [request] # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.listen), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = iter([firestore.ListenResponse()]) - response = client.listen(iter(requests)) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert next(args[0]) == request # Establish that the response is the type that we expect. @@ -2585,13 +2518,12 @@ async def test_listen_async( transport: str = "grpc_asyncio", request_type=firestore.ListenRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() - requests = [request] # Mock the actual call within the gRPC stub, and fake the request. @@ -2601,13 +2533,11 @@ async def test_listen_async( call.return_value.read = mock.AsyncMock( side_effect=[firestore.ListenResponse()] ) - response = await client.listen(iter(requests)) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert next(args[0]) == request # Establish that the response is the type that we expect. @@ -2624,7 +2554,7 @@ def test_list_collection_ids( transport: str = "grpc", request_type=firestore.ListCollectionIdsRequest ): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2640,21 +2570,16 @@ def test_list_collection_ids( collection_ids=["collection_ids_value"], next_page_token="next_page_token_value", ) - response = client.list_collection_ids(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListCollectionIdsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCollectionIdsPager) - assert response.collection_ids == ["collection_ids_value"] - assert response.next_page_token == "next_page_token_value" @@ -2666,7 +2591,7 @@ def test_list_collection_ids_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2676,7 +2601,6 @@ def test_list_collection_ids_empty_call(): client.list_collection_ids() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListCollectionIdsRequest() @@ -2685,7 +2609,7 @@ async def test_list_collection_ids_async( transport: str = "grpc_asyncio", request_type=firestore.ListCollectionIdsRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2703,20 +2627,16 @@ async def test_list_collection_ids_async( next_page_token="next_page_token_value", ) ) - response = await client.list_collection_ids(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ListCollectionIdsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListCollectionIdsAsyncPager) - assert response.collection_ids == ["collection_ids_value"] - assert response.next_page_token == "next_page_token_value" @@ -2726,11 +2646,12 @@ async def test_list_collection_ids_async_from_dict(): def test_list_collection_ids_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.ListCollectionIdsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2738,7 +2659,6 @@ def test_list_collection_ids_field_headers(): type(client.transport.list_collection_ids), "__call__" ) as call: call.return_value = firestore.ListCollectionIdsResponse() - client.list_collection_ids(request) # Establish that the underlying gRPC stub method was called. @@ -2753,11 +2673,12 @@ def test_list_collection_ids_field_headers(): @pytest.mark.asyncio async def test_list_collection_ids_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.ListCollectionIdsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2767,7 +2688,6 @@ async def test_list_collection_ids_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore.ListCollectionIdsResponse() ) - await client.list_collection_ids(request) # Establish that the underlying gRPC stub method was called. @@ -2781,7 +2701,7 @@ async def test_list_collection_ids_field_headers_async(): def test_list_collection_ids_flattened(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2789,7 +2709,6 @@ def test_list_collection_ids_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = firestore.ListCollectionIdsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_collection_ids(parent="parent_value",) @@ -2798,12 +2717,11 @@ def test_list_collection_ids_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_collection_ids_flattened_error(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2815,7 +2733,7 @@ def test_list_collection_ids_flattened_error(): @pytest.mark.asyncio async def test_list_collection_ids_flattened_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2835,13 +2753,12 @@ async def test_list_collection_ids_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_collection_ids_flattened_error_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2852,7 +2769,7 @@ async def test_list_collection_ids_flattened_error_async(): def test_list_collection_ids_pager(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2887,7 +2804,7 @@ def test_list_collection_ids_pager(): def test_list_collection_ids_pages(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2914,7 +2831,7 @@ def test_list_collection_ids_pages(): @pytest.mark.asyncio async def test_list_collection_ids_async_pager(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2948,7 +2865,7 @@ async def test_list_collection_ids_async_pager(): @pytest.mark.asyncio async def test_list_collection_ids_async_pages(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2979,7 +2896,7 @@ async def test_list_collection_ids_async_pages(): def test_batch_write(transport: str = "grpc", request_type=firestore.BatchWriteRequest): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2990,17 +2907,14 @@ def test_batch_write(transport: str = "grpc", request_type=firestore.BatchWriteR with mock.patch.object(type(client.transport.batch_write), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = firestore.BatchWriteResponse() - response = client.batch_write(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchWriteRequest() # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BatchWriteResponse) @@ -3012,7 +2926,7 @@ def test_batch_write_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3020,7 +2934,6 @@ def test_batch_write_empty_call(): client.batch_write() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchWriteRequest() @@ -3029,7 +2942,7 @@ async def test_batch_write_async( transport: str = "grpc_asyncio", request_type=firestore.BatchWriteRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3042,13 +2955,11 @@ async def test_batch_write_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore.BatchWriteResponse() ) - response = await client.batch_write(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.BatchWriteRequest() # Establish that the response is the type that we expect. @@ -3061,17 +2972,17 @@ async def test_batch_write_async_from_dict(): def test_batch_write_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.BatchWriteRequest() + request.database = "database/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.batch_write), "__call__") as call: call.return_value = firestore.BatchWriteResponse() - client.batch_write(request) # Establish that the underlying gRPC stub method was called. 
@@ -3086,11 +2997,12 @@ def test_batch_write_field_headers(): @pytest.mark.asyncio async def test_batch_write_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.BatchWriteRequest() + request.database = "database/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3098,7 +3010,6 @@ async def test_batch_write_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( firestore.BatchWriteResponse() ) - await client.batch_write(request) # Establish that the underlying gRPC stub method was called. @@ -3115,7 +3026,7 @@ def test_create_document( transport: str = "grpc", request_type=firestore.CreateDocumentRequest ): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3126,19 +3037,15 @@ def test_create_document( with mock.patch.object(type(client.transport.create_document), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = document.Document(name="name_value",) - response = client.create_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CreateDocumentRequest() # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - assert response.name == "name_value" @@ -3150,7 +3057,7 @@ def test_create_document_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3158,7 +3065,6 @@ def test_create_document_empty_call(): client.create_document() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CreateDocumentRequest() @@ -3167,7 +3073,7 @@ async def test_create_document_async( transport: str = "grpc_asyncio", request_type=firestore.CreateDocumentRequest ): client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3180,18 +3086,15 @@ async def test_create_document_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( document.Document(name="name_value",) ) - response = await client.create_document(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == firestore.CreateDocumentRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, document.Document) - assert response.name == "name_value" @@ -3201,17 +3104,17 @@ async def test_create_document_async_from_dict(): def test_create_document_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.CreateDocumentRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_document), "__call__") as call: call.return_value = document.Document() - client.create_document(request) # Establish that the underlying gRPC stub method was called. @@ -3226,17 +3129,17 @@ def test_create_document_field_headers(): @pytest.mark.asyncio async def test_create_document_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = firestore.CreateDocumentRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_document), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) - await client.create_document(request) # Establish that the underlying gRPC stub method was called. @@ -3252,16 +3155,16 @@ async def test_create_document_field_headers_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.FirestoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.FirestoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = FirestoreClient( @@ -3271,7 +3174,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.FirestoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = FirestoreClient( @@ -3282,7 +3185,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.FirestoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = FirestoreClient(transport=transport) assert client.transport is transport @@ -3291,13 +3194,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.FirestoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.FirestoreGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -3309,23 +3212,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) + client = FirestoreClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.FirestoreGrpcTransport,) def test_firestore_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.FirestoreTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -3337,7 +3240,7 @@ def test_firestore_base_transport(): ) as Transport: Transport.return_value = None transport = transports.FirestoreTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -3364,15 +3267,40 @@ def test_firestore_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_firestore_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FirestoreTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_firestore_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) 
transport = transports.FirestoreTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -3388,19 +3316,36 @@ def test_firestore_base_transport_with_credentials_file(): def test_firestore_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.firestore_v1.services.firestore.transports.FirestoreTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.FirestoreTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_firestore_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + FirestoreClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_firestore_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) FirestoreClient() adc.assert_called_once_with( scopes=( @@ -3411,14 +3356,38 @@ def test_firestore_auth_adc(): ) -def test_firestore_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport,], +) +@requires_google_auth_gte_1_25_0 +def test_firestore_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.FirestoreGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport,], +) +@requires_google_auth_lt_1_25_0 +def test_firestore_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -3428,12 +3397,123 @@ def test_firestore_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.FirestoreGrpcTransport, grpc_helpers), + (transports.FirestoreGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_firestore_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "firestore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + scopes=["1", "2"], + default_host="firestore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.FirestoreGrpcTransport, grpc_helpers), + (transports.FirestoreGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_firestore_transport_create_channel_old_api_core(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "firestore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.FirestoreGrpcTransport, grpc_helpers), + (transports.FirestoreGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_firestore_transport_create_channel_user_scopes(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "firestore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport], ) def test_firestore_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -3475,7 +3555,7 @@ def test_firestore_grpc_transport_client_cert_source_for_mtls(transport_class): def test_firestore_host_no_port(): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="firestore.googleapis.com" ), @@ -3485,7 +3565,7 @@ def test_firestore_host_no_port(): def test_firestore_host_with_port(): client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="firestore.googleapis.com:8000" ), @@ -3536,9 +3616,9 @@ def test_firestore_transport_channel_mtls_with_client_cert_source(transport_clas mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -3617,7 +3697,6 @@ def test_firestore_transport_channel_mtls_with_adc(transport_class): def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3638,7 +3717,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = FirestoreClient.common_folder_path(folder) assert expected == actual @@ -3657,7 +3735,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = FirestoreClient.common_organization_path(organization) assert expected == actual @@ -3676,7 +3753,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) actual = FirestoreClient.common_project_path(project) assert expected == actual @@ -3696,7 +3772,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -3723,7 +3798,7 @@ def 
test_client_withDEFAULT_CLIENT_INFO(): transports.FirestoreTransport, "_prep_wrapped_messages" ) as prep: client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3732,6 +3807,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = FirestoreClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info)
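
Note on the version-gated tests above: the decorators requires_google_auth_gte_1_25_0, requires_google_auth_lt_1_25_0, requires_api_core_gte_1_26_0 and requires_api_core_lt_1_26_0, as well as the ga_credentials alias, are referenced throughout this diff but their definitions are not shown in this excerpt. The following is a minimal sketch of how such skip markers and import aliases are typically declared near the top of a generated test module; the exact names, version pins, and helpers here are assumptions inferred from their usage above, not lines copied from this changeset.

    import packaging.version
    import pytest

    import google.auth  # imported as a module so mock.patch.object(google.auth, "default") works
    from google.api_core import version as api_core_version
    from google.auth import credentials as ga_credentials  # assumed alias matching the tests above

    # Assumption: fall back to "0.0.0" if the installed google-auth does not expose __version__.
    _GOOGLE_AUTH_VERSION = getattr(google.auth, "__version__", "0.0.0")
    _API_CORE_VERSION = api_core_version.__version__

    # Skip markers gating tests on the installed dependency versions.
    requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
        packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
        reason="This test requires google-auth >= 1.25.0",
    )
    requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
        packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
        reason="This test requires google-auth < 1.25.0",
    )
    requires_api_core_gte_1_26_0 = pytest.mark.skipif(
        packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"),
        reason="This test requires google-api-core >= 1.26.0",
    )
    requires_api_core_lt_1_26_0 = pytest.mark.skipif(
        packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"),
        reason="This test requires google-api-core < 1.26.0",
    )

With markers of this shape, a test such as test_firestore_transport_create_channel runs only when google-api-core >= 1.26.0 is installed (where create_channel accepts default_scopes and default_host), while its _old_api_core counterpart exercises the legacy scopes-only signature.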