diff --git a/.flake8 b/.flake8
index ed9316381c..29227d4cf4 100644
--- a/.flake8
+++ b/.flake8
@@ -26,6 +26,7 @@ exclude =
*_pb2.py
# Standard linting exemptions.
+ **/.nox/**
__pycache__,
.git,
*.pyc,
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index f8063630ab..fd2f5f3c34 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -7,6 +7,7 @@
# The firestore-dpe team is the default owner for anything not
# explicitly taken by someone else.
-* @googleapis/firestore-dpe
+* @googleapis/firestore-dpe @googleapis/api-firestore @googleapis/yoshi-python
+
/samples/ @googleapis/firestore-dpe @googleapis/python-samples-owners
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
new file mode 100644
index 0000000000..fc281c05bd
--- /dev/null
+++ b/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"],
+ "sourceFileExtensions": [
+ "ts",
+ "js",
+ "java",
+ "sh",
+ "Dockerfile",
+ "yaml",
+ "py",
+ "html",
+ "txt"
+ ]
+}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 8e08cebce7..861c70e56f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -51,8 +51,10 @@ docs.metadata
# Virtual environment
env/
+
+# Test logs
coverage.xml
-sponge_log.xml
+*sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
@@ -60,3 +62,4 @@ system_tests/local_test_setup
# Make sure a generated file isn't accidentally committed.
pylintrc
pylintrc.test
+.make/**
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index f26796a0b9..bf132aa41d 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -15,7 +15,11 @@
set -eo pipefail
-cd github/python-firestore
+if [[ -z "${PROJECT_ROOT:-}" ]]; then
+ PROJECT_ROOT="github/python-firestore"
+fi
+
+cd "${PROJECT_ROOT}"
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -33,16 +37,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+python3 -m pip uninstall --yes --quiet nox-automation
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --upgrade --quiet nox
+python3 -m nox --version
+
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+ cleanup() {
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ }
+ trap cleanup EXIT HUP
+fi
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
- python3.6 -m nox -s "${NOX_SESSION:-}"
+ python3 -m nox -s ${NOX_SESSION:-}
else
- python3.6 -m nox
+ python3 -m nox
fi
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
index 1118107829..2e8a0735a6 100644
--- a/.kokoro/docs/docs-presubmit.cfg
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -15,3 +15,14 @@ env_vars: {
key: "TRAMPOLINE_IMAGE_UPLOAD"
value: "false"
}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-firestore/.kokoro/build.sh"
+}
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "docs docfx"
+}
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index c841366a90..c87e9f2363 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -87,11 +87,11 @@ for file in samples/**/requirements.txt; do
python3.6 -m nox -s "$RUN_TESTS_SESSION"
EXIT=$?
- # If this is a periodic build, send the test log to the Build Cop Bot.
- # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
+ # If this is a periodic build, send the test log to the FlakyBot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
- chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
- $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
fi
if [[ $EXIT -ne 0 ]]; then
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
index 719bcd5ba8..4af6cdc26d 100755
--- a/.kokoro/trampoline_v2.sh
+++ b/.kokoro/trampoline_v2.sh
@@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
"KOKORO_GITHUB_COMMIT"
"KOKORO_GITHUB_PULL_REQUEST_NUMBER"
"KOKORO_GITHUB_PULL_REQUEST_COMMIT"
- # For Build Cop Bot
+ # For FlakyBot
"KOKORO_GITHUB_COMMIT_URL"
"KOKORO_GITHUB_PULL_REQUEST_URL"
)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000000..a9024b15d7
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,17 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v3.4.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+- repo: https://github.com/psf/black
+ rev: 19.10b0
+ hooks:
+ - id: black
+- repo: https://gitlab.com/pycqa/flake8
+ rev: 3.8.4
+ hooks:
+ - id: flake8
diff --git a/.trampolinerc b/.trampolinerc
index 995ee29111..383b6ec89f 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -24,6 +24,7 @@ required_envvars+=(
pass_down_envvars+=(
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
+ "NOX_SESSION"
)
# Prevent unintentional override on the default image.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a3b9e06d57..acccd9ca61 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,32 @@
[1]: https://pypi.org/project/google-cloud-firestore/#history
+## [2.1.0](https://www.github.com/googleapis/python-firestore/compare/v2.0.2...v2.1.0) (2021-03-30)
+
+
+### Features
+
+* add firestore bundles ([#319](https://www.github.com/googleapis/python-firestore/issues/319)) ([c54de50](https://www.github.com/googleapis/python-firestore/commit/c54de50922b810cac030a71526bf62b9e4785a2f))
+* adds synthed bundle protos ([#325](https://www.github.com/googleapis/python-firestore/issues/325)) ([8fdf2a8](https://www.github.com/googleapis/python-firestore/commit/8fdf2a868c66f7ebec39b190deb5d3a8a38bbc96))
+* support using client credentials with emulator ([#269](https://www.github.com/googleapis/python-firestore/issues/269)) ([dffc580](https://www.github.com/googleapis/python-firestore/commit/dffc580472193c7d3b0eecadc316d77afaa0d0bf))
+
+
+### Bug Fixes
+
+* limit data transferred as part of list_documents as we don't require field data ([#280](https://www.github.com/googleapis/python-firestore/issues/280)) ([0386bc4](https://www.github.com/googleapis/python-firestore/commit/0386bc4824bd082410112c495963f13fb2489dfa))
+* patch emulator channel to be created accordingly ([#288](https://www.github.com/googleapis/python-firestore/issues/288)) ([1a973f3](https://www.github.com/googleapis/python-firestore/commit/1a973f37e86207925e705d3fccdc88875d5d3ad1))
+* remove client recv msg limit fix: add enums to `types/__init__.py` ([#274](https://www.github.com/googleapis/python-firestore/issues/274)) ([2b47c00](https://www.github.com/googleapis/python-firestore/commit/2b47c0072769ee47073c68ccab73733416aa0cef))
+* use correct retry deadline ([#331](https://www.github.com/googleapis/python-firestore/issues/331)) ([f9586d4](https://www.github.com/googleapis/python-firestore/commit/f9586d4d75390e0daccd1ef0902d11b4dcca6472))
+* use correct type hint for '*path' args ([#300](https://www.github.com/googleapis/python-firestore/issues/300)) ([15b579f](https://www.github.com/googleapis/python-firestore/commit/15b579f0b94aa8de3310b8bbc14916e97ac0c060))
+
+
+### Documentation
+
+* add documentation for documentsnapshot class ([#263](https://www.github.com/googleapis/python-firestore/issues/263)) ([448c965](https://www.github.com/googleapis/python-firestore/commit/448c96580da9e6db039cc3c69d2ac0b87ae9a05e))
+* clarify semantics of 'merge' argument to 'Document.set' ([#278](https://www.github.com/googleapis/python-firestore/issues/278)) ([29c6374](https://www.github.com/googleapis/python-firestore/commit/29c637490e43db59529edcd3b61ccfca383eb223)), closes [#277](https://www.github.com/googleapis/python-firestore/issues/277)
+* trailing whitespace ([#310](https://www.github.com/googleapis/python-firestore/issues/310)) ([b8192f0](https://www.github.com/googleapis/python-firestore/commit/b8192f018ef53f93a75d3623045e3fd356fba17f))
+* update intersphinx for grpc and auth ([#261](https://www.github.com/googleapis/python-firestore/issues/261)) ([1bbd3a0](https://www.github.com/googleapis/python-firestore/commit/1bbd3a0dca43714289f741e759d8aaa40e3ef600))
+
### [2.0.2](https://www.github.com/googleapis/python-firestore/compare/v2.0.1...v2.0.2) (2020-12-05)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 577a55d876..f996e6c473 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -21,8 +21,8 @@ In order to add a feature:
- The feature must be documented in both the API and narrative
documentation.
-- The feature must work fully on the following CPython versions: 2.7,
- 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows.
+- The feature must work fully on the following CPython versions:
+ 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
$ nox -s unit-2.7
- $ nox -s unit-3.7
+ $ nox -s unit-3.8
$ ...
+- Args to pytest can be passed through the nox command separated by a `--`. For
+ example, to run a single test::
+
+ $ nox -s unit-3.8 -- -k
+
.. note::
The unit tests and system tests are described in the
@@ -93,8 +98,12 @@ On Debian/Ubuntu::
************
Coding Style
************
+- We use the automatic code formatter ``black``. You can run it using
+ the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+ $ nox -s blacken
-- PEP8 compliance, with exceptions defined in the linter configuration.
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
If you have ``nox`` installed, you can test that you have not introduced
any non-compliant code via::
@@ -111,6 +120,16 @@ Coding Style
should point to the official ``googleapis`` checkout and the
the branch should be the main branch on that remote (``master``).
+- This repository contains configuration for the
+ `pre-commit `__ tool, which automates checking
+ our linters during a commit. If you have it installed on your ``$PATH``,
+ you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+ $ pre-commit install
+ pre-commit installed at .git/hooks/pre-commit
+
Exceptions to PEP8:
- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
@@ -123,13 +142,18 @@ Running System Tests
- To run system tests, you can execute::
- $ nox -s system-3.7
+ # Run all system tests
+ $ nox -s system-3.8
$ nox -s system-2.7
+ # Run a single system test
+ $ nox -s system-3.8 -- -k
+
+
.. note::
System tests are only configured to run under Python 2.7 and
- Python 3.7. For expediency, we do not run them in older versions
+ Python 3.8. For expediency, we do not run them in older versions
of Python 3.
This alone will not run the tests. You'll need to change some local
@@ -152,6 +176,16 @@ Running System Tests
$ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json"
+**************************
+Updating Conformance Tests
+**************************
+
+The firestore client libraries use a shared set of conformance tests, the source of which can be found at https://github.com/googleapis/conformance-tests.
+
+To update the copy of these conformance tests used by this repository, run the provided Makefile:
+
+ $ make -f Makefile_v1
+
*************
Test Coverage
*************
@@ -192,25 +226,24 @@ Supported Python Versions
We support:
-- `Python 3.5`_
- `Python 3.6`_
- `Python 3.7`_
- `Python 3.8`_
+- `Python 3.9`_
-.. _Python 3.5: https://docs.python.org/3.5/
.. _Python 3.6: https://docs.python.org/3.6/
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
Supported versions can be found in our ``noxfile.py`` `config`_.
.. _config: https://github.com/googleapis/python-firestore/blob/master/noxfile.py
-Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020.
We also explicitly decided to support Python 3 beginning with version
-3.5. Reasons for this include:
+3.6. Reasons for this include:
- Encouraging use of newest versions of Python 3
- Taking the lead of `prominent`_ open-source `projects`_
diff --git a/LICENSE b/LICENSE
index a8ee855de2..d645695673 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,7 @@
- Apache License
+
+ Apache License
Version 2.0, January 2004
- https://www.apache.org/licenses/
+ http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
@@ -192,7 +193,7 @@
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- https://www.apache.org/licenses/LICENSE-2.0
+ http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/MANIFEST.in b/MANIFEST.in
index e9e29d1203..e783f4c620 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -16,10 +16,10 @@
# Generated by synthtool. DO NOT EDIT!
include README.rst LICENSE
-recursive-include google *.json *.proto
+recursive-include google *.json *.proto py.typed
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
# Exclude scripts for samples readmegen
-prune scripts/readme-gen
\ No newline at end of file
+prune scripts/readme-gen
diff --git a/Makefile_v1 b/Makefile_v1
index 1648687e27..5aa75fc93d 100644
--- a/Makefile_v1
+++ b/Makefile_v1
@@ -5,23 +5,32 @@
PROTOC = protoc
# Dependent repos.
-REPO_DIR=$(HOME)/git-repos
-PROTOBUF_REPO = $(REPO_DIR)/protobuf
-GOOGLEAPIS_REPO = $(REPO_DIR)/googleapis
-TESTS_REPO = $(REPO_DIR)/conformance-tests
+REPO_DIR = $(shell pwd)
+BUILD_DIR = $(shell pwd)/.make
+# This requires a few other repositories, assumed to be in the same root
+# of this repository.
+# => git clone git@github.com:protocolbuffers/protobuf
+PROTOBUF_REPO = $(BUILD_DIR)/protobuf
+# => git clone git@github.com:googleapis/googleapis.git
+GOOGLEAPIS_REPO = $(BUILD_DIR)/googleapis
+# => git clone git@github.com:googleapis/conformance-tests.git
+TESTS_REPO = $(BUILD_DIR)/conformance-tests
+
TEST_PROTO_DIR = $(TESTS_REPO)/firestore/v1
TEST_PROTO_SRC = $(TEST_PROTO_DIR)/proto/google/cloud/conformance/firestore/v1/tests.proto
-TESTDATA_DIR = `pwd`/tests/unit/v1/testdata/
+TESTDATA_DIR = $(REPO_DIR)/tests/unit/v1/testdata/
-TMPDIR = /tmp/python-fs-proto
+TMPDIR = $(BUILD_DIR)/python-fs-proto
TMPDIR_FS = $(TMPDIR)/google/cloud/firestore_v1/types
TEST_PROTO_COPY = $(TMPDIR_FS)/tests.proto
TEST_GEN_OUT = tests/unit/v1/conformance_tests.py
-OUTDIR = /tmp/python-fs-gen
+OUTDIR = $(BUILD_DIR)/python-fs-gen
+
+.PHONY: sync gen-protos docker-pull all format clean
-.PHONY: sync-protos gen-protos docker-pull
+all: gen-protos copy-testdata clean
-gen-protos: sync-protos tweak-protos docker-pull gen-protos-raw
+gen-protos: sync tweak-protos docker-pull gen-protos-raw format
gen-protos-raw:
mkdir -p $(OUTDIR)
@@ -48,10 +57,13 @@ tweak-protos:
sed -i -e 's@google\.firestore\.v1@google.cloud.firestore_v1@' $(TEST_PROTO_COPY)
sed -i -e 's@Cursor@Cursor_@' $(TEST_PROTO_COPY)
-sync-protos:
- cd $(PROTOBUF_REPO); git pull
- cd $(GOOGLEAPIS_REPO); git pull
- cd $(TESTS_REPO); git pull
+sync: clean
+ mkdir -p $(PROTOBUF_REPO)
+ git clone --depth 1 git@github.com:protocolbuffers/protobuf $(PROTOBUF_REPO)
+ mkdir -p $(GOOGLEAPIS_REPO)
+ git clone --depth 1 git@github.com:googleapis/googleapis.git $(GOOGLEAPIS_REPO)
+ mkdir -p $(TESTS_REPO)
+ git clone --depth 1 git@github.com:googleapis/conformance-tests.git $(TESTS_REPO)
docker-pull:
docker pull gcr.io/gapic-images/gapic-generator-python:latest
@@ -59,3 +71,9 @@ docker-pull:
copy-testdata:
rm $(TESTDATA_DIR)/*.json
cp $(TEST_PROTO_DIR)/*.json $(TESTDATA_DIR)/
+
+format:
+ nox -s blacken
+
+clean:
+ rm -rf $(BUILD_DIR)
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index 0abaf229fc..bcd37bbd3c 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,4 +1,9 @@
div#python2-eol {
border-color: red;
border-width: medium;
-}
\ No newline at end of file
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+ min-width: 100px
+}
diff --git a/docs/batch.rst b/docs/batch.rst
index d130d03791..db732c5492 100644
--- a/docs/batch.rst
+++ b/docs/batch.rst
@@ -1,6 +1,14 @@
Batches
~~~~~~~
+.. automodule:: google.cloud.firestore_v1.base_batch
+ :members:
+ :show-inheritance:
+
.. automodule:: google.cloud.firestore_v1.batch
:members:
:show-inheritance:
+
+.. automodule:: google.cloud.firestore_v1.async_batch
+ :members:
+ :show-inheritance:
diff --git a/docs/client.rst b/docs/client.rst
index c42eb43470..79811483a0 100644
--- a/docs/client.rst
+++ b/docs/client.rst
@@ -1,6 +1,14 @@
Client
~~~~~~
+.. automodule:: google.cloud.firestore_v1.base_client
+ :members:
+ :show-inheritance:
+
.. automodule:: google.cloud.firestore_v1.client
:members:
:show-inheritance:
+
+.. automodule:: google.cloud.firestore_v1.async_client
+ :members:
+ :show-inheritance:
diff --git a/docs/collection.rst b/docs/collection.rst
index 22d4d8243e..1bc227f9f2 100644
--- a/docs/collection.rst
+++ b/docs/collection.rst
@@ -1,6 +1,14 @@
Collections
~~~~~~~~~~~
+.. automodule:: google.cloud.firestore_v1.base_collection
+ :members:
+ :show-inheritance:
+
.. automodule:: google.cloud.firestore_v1.collection
:members:
:show-inheritance:
+
+.. automodule:: google.cloud.firestore_v1.async_collection
+ :members:
+ :show-inheritance:
diff --git a/docs/conf.py b/docs/conf.py
index f7af7c5d78..22838f8c0b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -345,10 +345,10 @@
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
- "python": ("http://python.readthedocs.org/en/latest/", None),
- "google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
+ "python": ("https://python.readthedocs.org/en/latest/", None),
+ "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
- "grpc": ("https://grpc.io/grpc/python/", None),
+ "grpc": ("https://grpc.github.io/grpc/python/", None),
"proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
}
diff --git a/docs/document.rst b/docs/document.rst
index bc04dd4443..163a9819d5 100644
--- a/docs/document.rst
+++ b/docs/document.rst
@@ -1,6 +1,14 @@
Documents
~~~~~~~~~
+.. automodule:: google.cloud.firestore_v1.base_document
+ :members:
+ :show-inheritance:
+
.. automodule:: google.cloud.firestore_v1.document
:members:
:show-inheritance:
+
+.. automodule:: google.cloud.firestore_v1.async_document
+ :members:
+ :show-inheritance:
diff --git a/docs/query.rst b/docs/query.rst
index 8f4117671c..3590112b6d 100644
--- a/docs/query.rst
+++ b/docs/query.rst
@@ -1,6 +1,14 @@
Queries
~~~~~~~
+.. automodule:: google.cloud.firestore_v1.base_query
+ :members:
+ :show-inheritance:
+
.. automodule:: google.cloud.firestore_v1.query
:members:
:show-inheritance:
+
+.. automodule:: google.cloud.firestore_v1.async_query
+ :members:
+ :show-inheritance:
diff --git a/docs/transaction.rst b/docs/transaction.rst
index 97e670a349..ef3d77f5d2 100644
--- a/docs/transaction.rst
+++ b/docs/transaction.rst
@@ -1,7 +1,17 @@
Transactions
~~~~~~~~~~~~
+.. automodule:: google.cloud.firestore_v1.base_transaction
+ :inherited-members:
+ :members:
+ :show-inheritance:
+
.. automodule:: google.cloud.firestore_v1.transaction
:inherited-members:
:members:
:show-inheritance:
+
+.. automodule:: google.cloud.firestore_v1.async_transaction
+ :inherited-members:
+ :members:
+ :show-inheritance:
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py
index 92ead923b0..ad6f760b84 100644
--- a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py
@@ -94,7 +94,36 @@ class FirestoreAdminAsyncClient:
FirestoreAdminClient.parse_common_location_path
)
- from_service_account_file = FirestoreAdminClient.from_service_account_file
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ FirestoreAdminAsyncClient: The constructed client.
+ """
+ return FirestoreAdminClient.from_service_account_info.__func__(FirestoreAdminAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ FirestoreAdminAsyncClient: The constructed client.
+ """
+ return FirestoreAdminClient.from_service_account_file.__func__(FirestoreAdminAsyncClient, filename, *args, **kwargs) # type: ignore
+
from_service_account_json = from_service_account_file
@property
@@ -175,18 +204,20 @@ async def create_index(
[IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata].
Args:
- request (:class:`~.firestore_admin.CreateIndexRequest`):
+ request (:class:`google.cloud.firestore_admin_v1.types.CreateIndexRequest`):
The request object. The request for
[FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
parent (:class:`str`):
Required. A parent name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- index (:class:`~.gfa_index.Index`):
+ index (:class:`google.cloud.firestore_admin_v1.types.Index`):
Required. The composite index to
create.
+
This corresponds to the ``index`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -198,13 +229,11 @@ async def create_index(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.gfa_index.Index``: Cloud Firestore indexes
- enable simple and complex queries against documents in a
- database.
+ The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.Index` Cloud Firestore indexes enable simple and complex queries against
+ documents in a database.
"""
# Create or coerce a protobuf request object.
@@ -267,12 +296,13 @@ async def list_indexes(
r"""Lists composite indexes.
Args:
- request (:class:`~.firestore_admin.ListIndexesRequest`):
+ request (:class:`google.cloud.firestore_admin_v1.types.ListIndexesRequest`):
The request object. The request for
[FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
parent (:class:`str`):
Required. A parent name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -284,7 +314,7 @@ async def list_indexes(
sent along with the request as metadata.
Returns:
- ~.pagers.ListIndexesAsyncPager:
+ google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesAsyncPager:
The response for
[FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
@@ -323,6 +353,7 @@ async def list_indexes(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -358,12 +389,13 @@ async def get_index(
r"""Gets a composite index.
Args:
- request (:class:`~.firestore_admin.GetIndexRequest`):
+ request (:class:`google.cloud.firestore_admin_v1.types.GetIndexRequest`):
The request object. The request for
[FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex].
name (:class:`str`):
Required. A name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -375,7 +407,7 @@ async def get_index(
sent along with the request as metadata.
Returns:
- ~.index.Index:
+ google.cloud.firestore_admin_v1.types.Index:
Cloud Firestore indexes enable simple
and complex queries against documents in
a database.
@@ -412,6 +444,7 @@ async def get_index(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -441,12 +474,13 @@ async def delete_index(
r"""Deletes a composite index.
Args:
- request (:class:`~.firestore_admin.DeleteIndexRequest`):
+ request (:class:`google.cloud.firestore_admin_v1.types.DeleteIndexRequest`):
The request object. The request for
[FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex].
name (:class:`str`):
Required. A name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -488,6 +522,7 @@ async def delete_index(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -516,12 +551,13 @@ async def get_field(
r"""Gets the metadata and configuration for a Field.
Args:
- request (:class:`~.firestore_admin.GetFieldRequest`):
+ request (:class:`google.cloud.firestore_admin_v1.types.GetFieldRequest`):
The request object. The request for
[FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField].
name (:class:`str`):
Required. A name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -533,7 +569,7 @@ async def get_field(
sent along with the request as metadata.
Returns:
- ~.field.Field:
+ google.cloud.firestore_admin_v1.types.Field:
Represents a single field in the
database.
Fields are grouped by their "Collection
@@ -572,6 +608,7 @@ async def get_field(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -616,10 +653,10 @@ async def update_field(
``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``.
Args:
- request (:class:`~.firestore_admin.UpdateFieldRequest`):
+ request (:class:`google.cloud.firestore_admin_v1.types.UpdateFieldRequest`):
The request object. The request for
[FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
- field (:class:`~.gfa_field.Field`):
+ field (:class:`google.cloud.firestore_admin_v1.types.Field`):
Required. The field to be updated.
This corresponds to the ``field`` field
on the ``request`` instance; if ``request`` is provided, this
@@ -632,16 +669,16 @@ async def update_field(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.gfa_field.Field``: Represents a single field
- in the database.
+ :class:`google.cloud.firestore_admin_v1.types.Field`
+ Represents a single field in the database.
- Fields are grouped by their "Collection Group", which
- represent all collections in the database with the same
- id.
+ Fields are grouped by their "Collection Group", which
+ represent all collections in the database with the
+ same id.
"""
# Create or coerce a protobuf request object.
@@ -711,12 +748,13 @@ async def list_fields(
with the filter set to ``indexConfig.usesAncestorConfig:false``.
Args:
- request (:class:`~.firestore_admin.ListFieldsRequest`):
+ request (:class:`google.cloud.firestore_admin_v1.types.ListFieldsRequest`):
The request object. The request for
[FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
parent (:class:`str`):
Required. A parent name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -728,7 +766,7 @@ async def list_fields(
sent along with the request as metadata.
Returns:
- ~.pagers.ListFieldsAsyncPager:
+ google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsAsyncPager:
The response for
[FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
@@ -767,6 +805,7 @@ async def list_fields(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -811,12 +850,13 @@ async def export_documents(
Google Cloud Storage.
Args:
- request (:class:`~.firestore_admin.ExportDocumentsRequest`):
+ request (:class:`google.cloud.firestore_admin_v1.types.ExportDocumentsRequest`):
The request object. The request for
[FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
name (:class:`str`):
Required. Database to export. Should be of the form:
``projects/{project_id}/databases/{database_id}``.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -828,11 +868,11 @@ async def export_documents(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.gfa_operation.ExportDocumentsResponse``:
+ :class:`google.cloud.firestore_admin_v1.types.ExportDocumentsResponse`
Returned in the
[google.longrunning.Operation][google.longrunning.Operation]
response field.
@@ -902,12 +942,13 @@ async def import_documents(
already been imported to Cloud Firestore.
Args:
- request (:class:`~.firestore_admin.ImportDocumentsRequest`):
+ request (:class:`google.cloud.firestore_admin_v1.types.ImportDocumentsRequest`):
The request object. The request for
[FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
name (:class:`str`):
Required. Database to import into. Should be of the
form: ``projects/{project_id}/databases/{database_id}``.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -919,24 +960,22 @@ async def import_documents(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.empty.Empty``: A generic empty message that
- you can re-use to avoid defining duplicated empty
- messages in your APIs. A typical example is to use it as
- the request or the response type of an API method. For
- instance:
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
- ::
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
- service Foo {
- rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
- }
+ }
- The JSON representation for ``Empty`` is empty JSON
- object ``{}``.
+ The JSON representation for Empty is empty JSON
+ object {}.
"""
# Create or coerce a protobuf request object.
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py
index 28ac8c7d5f..dd8cf373d1 100644
--- a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py
@@ -120,6 +120,22 @@ def _get_default_mtls_endpoint(api_endpoint):
DEFAULT_ENDPOINT
)
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ FirestoreAdminClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -132,7 +148,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
kwargs: Additional arguments to pass to the constructor.
Returns:
- {@api.name}: The constructed client.
+ FirestoreAdminClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
@@ -285,10 +301,10 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.FirestoreAdminTransport]): The
+ transport (Union[str, FirestoreAdminTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (client_options_lib.ClientOptions): Custom options for the
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
@@ -324,21 +340,17 @@ def __init__(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
- ssl_credentials = None
+ client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
- import grpc # type: ignore
-
- cert, key = client_options.client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
else:
- creds = SslCredentials()
- is_mtls = creds.is_mtls
- ssl_credentials = creds.ssl_credentials if is_mtls else None
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -381,7 +393,7 @@ def __init__(
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
- ssl_channel_credentials=ssl_credentials,
+ client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
@@ -403,18 +415,20 @@ def create_index(
[IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata].
Args:
- request (:class:`~.firestore_admin.CreateIndexRequest`):
+ request (google.cloud.firestore_admin_v1.types.CreateIndexRequest):
The request object. The request for
[FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
- parent (:class:`str`):
+ parent (str):
Required. A parent name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- index (:class:`~.gfa_index.Index`):
+ index (google.cloud.firestore_admin_v1.types.Index):
Required. The composite index to
create.
+
This corresponds to the ``index`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -426,13 +440,11 @@ def create_index(
sent along with the request as metadata.
Returns:
- ~.ga_operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.gfa_index.Index``: Cloud Firestore indexes
- enable simple and complex queries against documents in a
- database.
+ The result type for the operation will be :class:`google.cloud.firestore_admin_v1.types.Index` Cloud Firestore indexes enable simple and complex queries against
+ documents in a database.
"""
# Create or coerce a protobuf request object.
@@ -496,12 +508,13 @@ def list_indexes(
r"""Lists composite indexes.
Args:
- request (:class:`~.firestore_admin.ListIndexesRequest`):
+ request (google.cloud.firestore_admin_v1.types.ListIndexesRequest):
The request object. The request for
[FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
- parent (:class:`str`):
+ parent (str):
Required. A parent name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -513,7 +526,7 @@ def list_indexes(
sent along with the request as metadata.
Returns:
- ~.pagers.ListIndexesPager:
+ google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListIndexesPager:
The response for
[FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
@@ -578,12 +591,13 @@ def get_index(
r"""Gets a composite index.
Args:
- request (:class:`~.firestore_admin.GetIndexRequest`):
+ request (google.cloud.firestore_admin_v1.types.GetIndexRequest):
The request object. The request for
[FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex].
- name (:class:`str`):
+ name (str):
Required. A name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -595,7 +609,7 @@ def get_index(
sent along with the request as metadata.
Returns:
- ~.index.Index:
+ google.cloud.firestore_admin_v1.types.Index:
Cloud Firestore indexes enable simple
and complex queries against documents in
a database.
@@ -652,12 +666,13 @@ def delete_index(
r"""Deletes a composite index.
Args:
- request (:class:`~.firestore_admin.DeleteIndexRequest`):
+ request (google.cloud.firestore_admin_v1.types.DeleteIndexRequest):
The request object. The request for
[FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex].
- name (:class:`str`):
+ name (str):
Required. A name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -718,12 +733,13 @@ def get_field(
r"""Gets the metadata and configuration for a Field.
Args:
- request (:class:`~.firestore_admin.GetFieldRequest`):
+ request (google.cloud.firestore_admin_v1.types.GetFieldRequest):
The request object. The request for
[FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField].
- name (:class:`str`):
+ name (str):
Required. A name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -735,7 +751,7 @@ def get_field(
sent along with the request as metadata.
Returns:
- ~.field.Field:
+ google.cloud.firestore_admin_v1.types.Field:
Represents a single field in the
database.
Fields are grouped by their "Collection
@@ -809,10 +825,10 @@ def update_field(
``projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*``.
Args:
- request (:class:`~.firestore_admin.UpdateFieldRequest`):
+ request (google.cloud.firestore_admin_v1.types.UpdateFieldRequest):
The request object. The request for
[FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
- field (:class:`~.gfa_field.Field`):
+ field (google.cloud.firestore_admin_v1.types.Field):
Required. The field to be updated.
This corresponds to the ``field`` field
on the ``request`` instance; if ``request`` is provided, this
@@ -825,16 +841,16 @@ def update_field(
sent along with the request as metadata.
Returns:
- ~.ga_operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.gfa_field.Field``: Represents a single field
- in the database.
+ :class:`google.cloud.firestore_admin_v1.types.Field`
+ Represents a single field in the database.
- Fields are grouped by their "Collection Group", which
- represent all collections in the database with the same
- id.
+ Fields are grouped by their "Collection Group", which
+ represent all collections in the database with the
+ same id.
"""
# Create or coerce a protobuf request object.
@@ -905,12 +921,13 @@ def list_fields(
with the filter set to ``indexConfig.usesAncestorConfig:false``.
Args:
- request (:class:`~.firestore_admin.ListFieldsRequest`):
+ request (google.cloud.firestore_admin_v1.types.ListFieldsRequest):
The request object. The request for
[FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
- parent (:class:`str`):
+ parent (str):
Required. A parent name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -922,7 +939,7 @@ def list_fields(
sent along with the request as metadata.
Returns:
- ~.pagers.ListFieldsPager:
+ google.cloud.firestore_admin_v1.services.firestore_admin.pagers.ListFieldsPager:
The response for
[FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
@@ -996,12 +1013,13 @@ def export_documents(
Google Cloud Storage.
Args:
- request (:class:`~.firestore_admin.ExportDocumentsRequest`):
+ request (google.cloud.firestore_admin_v1.types.ExportDocumentsRequest):
The request object. The request for
[FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
- name (:class:`str`):
+ name (str):
Required. Database to export. Should be of the form:
``projects/{project_id}/databases/{database_id}``.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -1013,11 +1031,11 @@ def export_documents(
sent along with the request as metadata.
Returns:
- ~.ga_operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
- :class:``~.gfa_operation.ExportDocumentsResponse``:
+ :class:`google.cloud.firestore_admin_v1.types.ExportDocumentsResponse`
Returned in the
[google.longrunning.Operation][google.longrunning.Operation]
response field.
@@ -1088,12 +1106,13 @@ def import_documents(
already been imported to Cloud Firestore.
Args:
- request (:class:`~.firestore_admin.ImportDocumentsRequest`):
+ request (google.cloud.firestore_admin_v1.types.ImportDocumentsRequest):
The request object. The request for
[FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
- name (:class:`str`):
+ name (str):
Required. Database to import into. Should be of the
form: ``projects/{project_id}/databases/{database_id}``.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -1105,24 +1124,22 @@ def import_documents(
sent along with the request as metadata.
Returns:
- ~.ga_operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.empty.Empty``: A generic empty message that
- you can re-use to avoid defining duplicated empty
- messages in your APIs. A typical example is to use it as
- the request or the response type of an API method. For
- instance:
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
- ::
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
- service Foo {
- rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
- }
+ }
- The JSON representation for ``Empty`` is empty JSON
- object ``{}``.
+ The JSON representation for Empty is empty JSON
+ object {}.
"""
# Create or coerce a protobuf request object.
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py
index 2525da38a8..0b51a2c851 100644
--- a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.firestore_admin_v1.types import field
from google.cloud.firestore_admin_v1.types import firestore_admin
@@ -26,7 +35,7 @@ class ListIndexesPager:
"""A pager for iterating through ``list_indexes`` requests.
This class thinly wraps an initial
- :class:`~.firestore_admin.ListIndexesResponse` object, and
+ :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` object, and
provides an ``__iter__`` method to iterate through its
``indexes`` field.
@@ -35,7 +44,7 @@ class ListIndexesPager:
through the ``indexes`` field on the
corresponding responses.
- All the usual :class:`~.firestore_admin.ListIndexesResponse`
+ All the usual :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -53,9 +62,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.firestore_admin.ListIndexesRequest`):
+ request (google.cloud.firestore_admin_v1.types.ListIndexesRequest):
The initial request object.
- response (:class:`~.firestore_admin.ListIndexesResponse`):
+ response (google.cloud.firestore_admin_v1.types.ListIndexesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -88,7 +97,7 @@ class ListIndexesAsyncPager:
"""A pager for iterating through ``list_indexes`` requests.
This class thinly wraps an initial
- :class:`~.firestore_admin.ListIndexesResponse` object, and
+ :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` object, and
provides an ``__aiter__`` method to iterate through its
``indexes`` field.
@@ -97,7 +106,7 @@ class ListIndexesAsyncPager:
through the ``indexes`` field on the
corresponding responses.
- All the usual :class:`~.firestore_admin.ListIndexesResponse`
+ All the usual :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -115,9 +124,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.firestore_admin.ListIndexesRequest`):
+ request (google.cloud.firestore_admin_v1.types.ListIndexesRequest):
The initial request object.
- response (:class:`~.firestore_admin.ListIndexesResponse`):
+ response (google.cloud.firestore_admin_v1.types.ListIndexesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -154,7 +163,7 @@ class ListFieldsPager:
"""A pager for iterating through ``list_fields`` requests.
This class thinly wraps an initial
- :class:`~.firestore_admin.ListFieldsResponse` object, and
+ :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` object, and
provides an ``__iter__`` method to iterate through its
``fields`` field.
@@ -163,7 +172,7 @@ class ListFieldsPager:
through the ``fields`` field on the
corresponding responses.
- All the usual :class:`~.firestore_admin.ListFieldsResponse`
+ All the usual :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -181,9 +190,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.firestore_admin.ListFieldsRequest`):
+ request (google.cloud.firestore_admin_v1.types.ListFieldsRequest):
The initial request object.
- response (:class:`~.firestore_admin.ListFieldsResponse`):
+ response (google.cloud.firestore_admin_v1.types.ListFieldsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -216,7 +225,7 @@ class ListFieldsAsyncPager:
"""A pager for iterating through ``list_fields`` requests.
This class thinly wraps an initial
- :class:`~.firestore_admin.ListFieldsResponse` object, and
+ :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``fields`` field.
@@ -225,7 +234,7 @@ class ListFieldsAsyncPager:
through the ``fields`` field on the
corresponding responses.
- All the usual :class:`~.firestore_admin.ListFieldsResponse`
+ All the usual :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -243,9 +252,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.firestore_admin.ListFieldsRequest`):
+ request (google.cloud.firestore_admin_v1.types.ListFieldsRequest):
The initial request object.
- response (:class:`~.firestore_admin.ListFieldsResponse`):
+ response (google.cloud.firestore_admin_v1.types.ListFieldsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py
index 08dd3f989b..7ddd11ebd5 100644
--- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py
@@ -28,7 +28,6 @@
_transport_registry["grpc"] = FirestoreAdminGrpcTransport
_transport_registry["grpc_asyncio"] = FirestoreAdminGrpcAsyncIOTransport
-
__all__ = (
"FirestoreAdminTransport",
"FirestoreAdminGrpcTransport",
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py
index ac4c4475f5..f81e653de7 100644
--- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py
@@ -77,10 +77,10 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
@@ -88,6 +88,9 @@ def __init__(
host += ":443"
self._host = host
+ # Save the scopes.
+ self._scopes = scopes or self.AUTH_SCOPES
+
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
@@ -97,20 +100,17 @@ def __init__(
if credentials_file is not None:
credentials, _ = auth.load_credentials_from_file(
- credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = auth.default(
- scopes=scopes, quota_project_id=quota_project_id
+ scopes=self._scopes, quota_project_id=quota_project_id
)
# Save the credentials.
self._credentials = credentials
- # Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages(client_info)
-
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
@@ -128,6 +128,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -143,6 +144,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -158,6 +160,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -173,6 +176,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -191,6 +195,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py
index dd94987053..b3472f2576 100644
--- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py
@@ -63,6 +63,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -93,6 +94,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -107,83 +112,71 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
- else:
- ssl_credentials = SslCredentials().ssl_credentials
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
- self._ssl_channel_credentials = ssl_credentials
else:
- host = host if ":" in host else host + ":443"
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- self._stubs = {} # type: Dict[str, Callable]
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- # Run the base constructor.
+ # The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
)
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
@classmethod
def create_channel(
cls,
@@ -196,7 +189,7 @@ def create_channel(
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
- address (Optionsl[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -243,13 +236,11 @@ def operations_client(self) -> operations_v1.OperationsClient:
client.
"""
# Sanity check: Only create a new client if we do not already have one.
- if "operations_client" not in self.__dict__:
- self.__dict__["operations_client"] = operations_v1.OperationsClient(
- self.grpc_channel
- )
+ if self._operations_client is None:
+ self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
# Return the client from cache.
- return self.__dict__["operations_client"]
+ return self._operations_client
@property
def create_index(
diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py
index 4221895f34..927c5dc9a9 100644
--- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py
+++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py
@@ -67,7 +67,7 @@ def create_channel(
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
- address (Optional[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -107,6 +107,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -138,12 +139,16 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -152,82 +157,70 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
- else:
- ssl_credentials = SslCredentials().ssl_credentials
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
- self._ssl_channel_credentials = ssl_credentials
else:
- host = host if ":" in host else host + ":443"
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- # Run the base constructor.
+ # The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
)
- self._stubs = {}
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
@@ -247,13 +240,13 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient:
client.
"""
# Sanity check: Only create a new client if we do not already have one.
- if "operations_client" not in self.__dict__:
- self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient(
+ if self._operations_client is None:
+ self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
- return self.__dict__["operations_client"]
+ return self._operations_client
@property
def create_index(
diff --git a/google/cloud/firestore_admin_v1/types/__init__.py b/google/cloud/firestore_admin_v1/types/__init__.py
index f5cbaa99c9..f6838c6248 100644
--- a/google/cloud/firestore_admin_v1/types/__init__.py
+++ b/google/cloud/firestore_admin_v1/types/__init__.py
@@ -15,51 +15,52 @@
# limitations under the License.
#
-from .index import Index
from .field import Field
from .firestore_admin import (
CreateIndexRequest,
- ListIndexesRequest,
- ListIndexesResponse,
- GetIndexRequest,
DeleteIndexRequest,
- UpdateFieldRequest,
+ ExportDocumentsRequest,
GetFieldRequest,
+ GetIndexRequest,
+ ImportDocumentsRequest,
ListFieldsRequest,
ListFieldsResponse,
- ExportDocumentsRequest,
- ImportDocumentsRequest,
+ ListIndexesRequest,
+ ListIndexesResponse,
+ UpdateFieldRequest,
)
+from .index import Index
from .location import LocationMetadata
from .operation import (
- IndexOperationMetadata,
- FieldOperationMetadata,
ExportDocumentsMetadata,
- ImportDocumentsMetadata,
ExportDocumentsResponse,
+ FieldOperationMetadata,
+ ImportDocumentsMetadata,
+ IndexOperationMetadata,
Progress,
+ OperationState,
)
-
__all__ = (
- "Index",
"Field",
"CreateIndexRequest",
- "ListIndexesRequest",
- "ListIndexesResponse",
- "GetIndexRequest",
"DeleteIndexRequest",
- "UpdateFieldRequest",
+ "ExportDocumentsRequest",
"GetFieldRequest",
+ "GetIndexRequest",
+ "ImportDocumentsRequest",
"ListFieldsRequest",
"ListFieldsResponse",
- "ExportDocumentsRequest",
- "ImportDocumentsRequest",
+ "ListIndexesRequest",
+ "ListIndexesResponse",
+ "UpdateFieldRequest",
+ "Index",
"LocationMetadata",
- "IndexOperationMetadata",
- "FieldOperationMetadata",
"ExportDocumentsMetadata",
- "ImportDocumentsMetadata",
"ExportDocumentsResponse",
+ "FieldOperationMetadata",
+ "ImportDocumentsMetadata",
+ "IndexOperationMetadata",
"Progress",
+ "OperationState",
)
diff --git a/google/cloud/firestore_admin_v1/types/field.py b/google/cloud/firestore_admin_v1/types/field.py
index b63869b6e6..00f1fa29bc 100644
--- a/google/cloud/firestore_admin_v1/types/field.py
+++ b/google/cloud/firestore_admin_v1/types/field.py
@@ -55,7 +55,7 @@ class Field(proto.Message):
Indexes defined on this ``Field`` will be applied to all
fields which do not have their own ``Field`` index
configuration.
- index_config (~.field.Field.IndexConfig):
+ index_config (google.cloud.firestore_admin_v1.types.Field.IndexConfig):
The index configuration for this field. If unset, field
indexing will revert to the configuration defined by the
``ancestor_field``. To explicitly remove all indexes for
@@ -67,7 +67,7 @@ class IndexConfig(proto.Message):
r"""The index configuration for this field.
Attributes:
- indexes (Sequence[~.index.Index]):
+ indexes (Sequence[google.cloud.firestore_admin_v1.types.Index]):
The indexes supported for this field.
uses_ancestor_config (bool):
Output only. When true, the ``Field``'s index configuration
diff --git a/google/cloud/firestore_admin_v1/types/firestore_admin.py b/google/cloud/firestore_admin_v1/types/firestore_admin.py
index 7a365edb34..d3eae822ca 100644
--- a/google/cloud/firestore_admin_v1/types/firestore_admin.py
+++ b/google/cloud/firestore_admin_v1/types/firestore_admin.py
@@ -49,7 +49,7 @@ class CreateIndexRequest(proto.Message):
parent (str):
Required. A parent name of the form
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}``
- index (~.gfa_index.Index):
+ index (google.cloud.firestore_admin_v1.types.Index):
Required. The composite index to create.
"""
@@ -90,7 +90,7 @@ class ListIndexesResponse(proto.Message):
[FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes].
Attributes:
- indexes (Sequence[~.gfa_index.Index]):
+ indexes (Sequence[google.cloud.firestore_admin_v1.types.Index]):
The requested indexes.
next_page_token (str):
A page token that may be used to request
@@ -138,9 +138,9 @@ class UpdateFieldRequest(proto.Message):
[FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
Attributes:
- field (~.gfa_field.Field):
+ field (google.cloud.firestore_admin_v1.types.Field):
Required. The field to be updated.
- update_mask (~.field_mask.FieldMask):
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
A mask, relative to the field. If specified, only
configuration specified by this field_mask will be updated
in the field.
@@ -202,7 +202,7 @@ class ListFieldsResponse(proto.Message):
[FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields].
Attributes:
- fields (Sequence[~.gfa_field.Field]):
+ fields (Sequence[google.cloud.firestore_admin_v1.types.Field]):
The requested fields.
next_page_token (str):
A page token that may be used to request
diff --git a/google/cloud/firestore_admin_v1/types/index.py b/google/cloud/firestore_admin_v1/types/index.py
index 3f10dfb081..cbac4cf9dd 100644
--- a/google/cloud/firestore_admin_v1/types/index.py
+++ b/google/cloud/firestore_admin_v1/types/index.py
@@ -31,7 +31,7 @@ class Index(proto.Message):
of this name for composite indexes will be:
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{composite_index_id}``
For single field indexes, this field will be empty.
- query_scope (~.index.Index.QueryScope):
+ query_scope (google.cloud.firestore_admin_v1.types.Index.QueryScope):
Indexes with a collection query scope
specified allow queries against a collection
that is the child of a specific document,
@@ -42,7 +42,7 @@ class Index(proto.Message):
descended from a specific document, specified at
query time, and that have the same collection id
as this index.
- fields (Sequence[~.index.Index.IndexField]):
+ fields (Sequence[google.cloud.firestore_admin_v1.types.Index.IndexField]):
The fields supported by this index.
For composite indexes, this is always 2 or more fields. The
@@ -57,7 +57,7 @@ class Index(proto.Message):
For single field indexes, this will always be exactly one
entry with a field path equal to the field path of the
associated field.
- state (~.index.Index.State):
+ state (google.cloud.firestore_admin_v1.types.Index.State):
Output only. The serving state of the index.
"""
@@ -89,11 +89,11 @@ class IndexField(proto.Message):
field_path (str):
Can be **name**. For single field indexes, this must match
the name of the field or may be omitted.
- order (~.index.Index.IndexField.Order):
+ order (google.cloud.firestore_admin_v1.types.Index.IndexField.Order):
Indicates that this field supports ordering
by the specified order or comparing using =, <,
<=, >, >=.
- array_config (~.index.Index.IndexField.ArrayConfig):
+ array_config (google.cloud.firestore_admin_v1.types.Index.IndexField.ArrayConfig):
Indicates that this field supports operations on
``array_value``\ s.
"""
diff --git a/google/cloud/firestore_admin_v1/types/operation.py b/google/cloud/firestore_admin_v1/types/operation.py
index 29e902f46c..628b27ccb4 100644
--- a/google/cloud/firestore_admin_v1/types/operation.py
+++ b/google/cloud/firestore_admin_v1/types/operation.py
@@ -55,21 +55,21 @@ class IndexOperationMetadata(proto.Message):
[FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex].
Attributes:
- start_time (~.timestamp.Timestamp):
+ start_time (google.protobuf.timestamp_pb2.Timestamp):
The time this operation started.
- end_time (~.timestamp.Timestamp):
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
The time this operation completed. Will be
unset if operation still in progress.
index (str):
The index resource that this operation is acting on. For
example:
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}``
- state (~.operation.OperationState):
+ state (google.cloud.firestore_admin_v1.types.OperationState):
The state of the operation.
- progress_documents (~.operation.Progress):
+ progress_documents (google.cloud.firestore_admin_v1.types.Progress):
The progress, in documents, of this
operation.
- progress_bytes (~.operation.Progress):
+ progress_bytes (google.cloud.firestore_admin_v1.types.Progress):
The progress, in bytes, of this operation.
"""
@@ -93,25 +93,25 @@ class FieldOperationMetadata(proto.Message):
[FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField].
Attributes:
- start_time (~.timestamp.Timestamp):
+ start_time (google.protobuf.timestamp_pb2.Timestamp):
The time this operation started.
- end_time (~.timestamp.Timestamp):
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
The time this operation completed. Will be
unset if operation still in progress.
field (str):
The field resource that this operation is acting on. For
example:
``projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}``
- index_config_deltas (Sequence[~.operation.FieldOperationMetadata.IndexConfigDelta]):
+ index_config_deltas (Sequence[google.cloud.firestore_admin_v1.types.FieldOperationMetadata.IndexConfigDelta]):
A list of
[IndexConfigDelta][google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta],
which describe the intent of this operation.
- state (~.operation.OperationState):
+ state (google.cloud.firestore_admin_v1.types.OperationState):
The state of the operation.
- progress_documents (~.operation.Progress):
+ progress_documents (google.cloud.firestore_admin_v1.types.Progress):
The progress, in documents, of this
operation.
- progress_bytes (~.operation.Progress):
+ progress_bytes (google.cloud.firestore_admin_v1.types.Progress):
The progress, in bytes, of this operation.
"""
@@ -119,9 +119,9 @@ class IndexConfigDelta(proto.Message):
r"""Information about an index configuration change.
Attributes:
- change_type (~.operation.FieldOperationMetadata.IndexConfigDelta.ChangeType):
+ change_type (google.cloud.firestore_admin_v1.types.FieldOperationMetadata.IndexConfigDelta.ChangeType):
Specifies how the index is changing.
- index (~.gfa_index.Index):
+ index (google.cloud.firestore_admin_v1.types.Index):
The index being changed.
"""
@@ -163,17 +163,17 @@ class ExportDocumentsMetadata(proto.Message):
[FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments].
Attributes:
- start_time (~.timestamp.Timestamp):
+ start_time (google.protobuf.timestamp_pb2.Timestamp):
The time this operation started.
- end_time (~.timestamp.Timestamp):
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
The time this operation completed. Will be
unset if operation still in progress.
- operation_state (~.operation.OperationState):
+ operation_state (google.cloud.firestore_admin_v1.types.OperationState):
The state of the export operation.
- progress_documents (~.operation.Progress):
+ progress_documents (google.cloud.firestore_admin_v1.types.Progress):
The progress, in documents, of this
operation.
- progress_bytes (~.operation.Progress):
+ progress_bytes (google.cloud.firestore_admin_v1.types.Progress):
The progress, in bytes, of this operation.
collection_ids (Sequence[str]):
Which collection ids are being exported.
@@ -203,17 +203,17 @@ class ImportDocumentsMetadata(proto.Message):
[FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments].
Attributes:
- start_time (~.timestamp.Timestamp):
+ start_time (google.protobuf.timestamp_pb2.Timestamp):
The time this operation started.
- end_time (~.timestamp.Timestamp):
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
The time this operation completed. Will be
unset if operation still in progress.
- operation_state (~.operation.OperationState):
+ operation_state (google.cloud.firestore_admin_v1.types.OperationState):
The state of the import operation.
- progress_documents (~.operation.Progress):
+ progress_documents (google.cloud.firestore_admin_v1.types.Progress):
The progress, in documents, of this
operation.
- progress_bytes (~.operation.Progress):
+ progress_bytes (google.cloud.firestore_admin_v1.types.Progress):
The progress, in bytes, of this operation.
collection_ids (Sequence[str]):
Which collection ids are being imported.
diff --git a/google/cloud/firestore_bundle/__init__.py b/google/cloud/firestore_bundle/__init__.py
new file mode 100644
index 0000000000..d1ffaeff58
--- /dev/null
+++ b/google/cloud/firestore_bundle/__init__.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .types.bundle import BundleElement
+from .types.bundle import BundleMetadata
+from .types.bundle import BundledDocumentMetadata
+from .types.bundle import BundledQuery
+from .types.bundle import NamedQuery
+
+from .bundle import FirestoreBundle
+
+
+__all__ = (
+ "BundleElement",
+ "BundleMetadata",
+ "BundledDocumentMetadata",
+ "NamedQuery",
+ "BundledQuery",
+ "FirestoreBundle",
+)
diff --git a/google/cloud/firestore_bundle/_helpers.py b/google/cloud/firestore_bundle/_helpers.py
new file mode 100644
index 0000000000..8b7ce7a698
--- /dev/null
+++ b/google/cloud/firestore_bundle/_helpers.py
@@ -0,0 +1,13 @@
+from google.cloud.firestore_v1.base_query import BaseQuery
+from google.cloud.firestore_bundle.types import BundledQuery
+
+
+def limit_type_of_query(query: BaseQuery) -> int:
+ """BundledQuery.LimitType equivalent of this query.
+ """
+
+ return (
+ BundledQuery.LimitType.LAST
+ if query._limit_to_last
+ else BundledQuery.LimitType.FIRST
+ )
diff --git a/google/cloud/firestore_bundle/bundle.py b/google/cloud/firestore_bundle/bundle.py
new file mode 100644
index 0000000000..eae1fa3f4a
--- /dev/null
+++ b/google/cloud/firestore_bundle/bundle.py
@@ -0,0 +1,362 @@
+# Copyright 2021 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes for representing bundles for the Google Cloud Firestore API."""
+
+import datetime
+import json
+
+from google.cloud.firestore_bundle.types.bundle import (
+ BundledDocumentMetadata,
+ BundledQuery,
+ BundleElement,
+ BundleMetadata,
+ NamedQuery,
+)
+from google.cloud._helpers import _datetime_to_pb_timestamp, UTC # type: ignore
+from google.cloud.firestore_bundle._helpers import limit_type_of_query
+from google.cloud.firestore_v1.async_query import AsyncQuery
+from google.cloud.firestore_v1.base_client import BaseClient
+from google.cloud.firestore_v1.base_document import DocumentSnapshot
+from google.cloud.firestore_v1.base_query import BaseQuery
+from google.cloud.firestore_v1.document import DocumentReference
+from google.cloud.firestore_v1 import _helpers
+from google.protobuf.timestamp_pb2 import Timestamp # type: ignore
+from google.protobuf import json_format # type: ignore
+from typing import (
+ Dict,
+ List,
+ Optional,
+ Union,
+)
+
+
+class FirestoreBundle:
+ """A group of serialized documents and queries, suitable for
+    long-term storage or query resumption.
+
+ If any queries are added to this bundle, all associated documents will be
+ loaded and stored in memory for serialization.
+
+ Usage:
+
+ from google.cloud.firestore import Client
+ from google.cloud.firestore_bundle import FirestoreBundle
+ from google.cloud.firestore import _helpers
+
+ db = Client()
+ bundle = FirestoreBundle('my-bundle')
+ bundle.add_named_query('all-users', db.collection('users')._query())
+ bundle.add_named_query(
+ 'top-ten-hamburgers',
+ db.collection('hamburgers').limit(limit=10)._query(),
+ )
+ serialized: str = bundle.build()
+
+ # Store somewhere like your GCS for retrieval by a client SDK.
+
+ Args:
+ name (str): The Id of the bundle.
+ """
+
+ BUNDLE_SCHEMA_VERSION: int = 1
+
+ def __init__(self, name: str) -> None:
+ self.name: str = name
+ self.documents: Dict[str, "_BundledDocument"] = {}
+ self.named_queries: Dict[str, NamedQuery] = {}
+ self.latest_read_time: Timestamp = Timestamp(seconds=0, nanos=0)
+ self._deserialized_metadata: Optional[BundledDocumentMetadata] = None
+
+ def add_document(self, snapshot: DocumentSnapshot) -> "FirestoreBundle":
+ """Adds a document to the bundle.
+
+ Args:
+ snapshot (DocumentSnapshot): The fully-loaded Firestore document to
+ be preserved.
+
+ Example:
+
+ from google.cloud import firestore
+
+ db = firestore.Client()
+ collection_ref = db.collection(u'users')
+
+ bundle = firestore.FirestoreBundle('my bundle')
+            bundle.add_document(collection_ref.document('some_id').get())
+
+ Returns:
+ FirestoreBundle: self
+ """
+ original_document: Optional[_BundledDocument]
+ original_queries: Optional[List[str]] = []
+ full_document_path: str = snapshot.reference._document_path
+
+ original_document = self.documents.get(full_document_path)
+ if original_document:
+ original_queries = original_document.metadata.queries # type: ignore
+
+ should_use_snaphot: bool = (
+ original_document is None
+ # equivalent to:
+ # `if snapshot.read_time > original_document.snapshot.read_time`
+ or _helpers.compare_timestamps(
+ snapshot.read_time, original_document.snapshot.read_time,
+ )
+ >= 0
+ )
+
+ if should_use_snaphot:
+ self.documents[full_document_path] = _BundledDocument(
+ snapshot=snapshot,
+ metadata=BundledDocumentMetadata(
+ name=full_document_path,
+ read_time=snapshot.read_time,
+ exists=snapshot.exists,
+ queries=original_queries,
+ ),
+ )
+
+ self._update_last_read_time(snapshot.read_time)
+ self._reset_metadata()
+ return self
+
+ def add_named_query(self, name: str, query: BaseQuery) -> "FirestoreBundle":
+ """Adds a query to the bundle, referenced by the provided name.
+
+ Args:
+ name (str): The name by which the provided query should be referenced.
+ query (Query): Query of documents to be fully loaded and stored in
+ the bundle for future access.
+
+ Example:
+
+ from google.cloud import firestore
+
+ db = firestore.Client()
+ collection_ref = db.collection(u'users')
+
+ bundle = firestore.FirestoreBundle('my bundle')
+ bundle.add_named_query('all the users', collection_ref._query())
+
+ Returns:
+ FirestoreBundle: self
+
+ Raises:
+ ValueError: If anything other than a BaseQuery (e.g., a Collection)
+ is supplied. If you have a Collection, call its `_query()`
+ method to get what this method expects.
+ ValueError: If the supplied name has already been added.
+ """
+ if not isinstance(query, BaseQuery):
+ raise ValueError(
+ "Attempted to add named query of type: "
+ f"{type(query).__name__}. Expected BaseQuery.",
+ )
+
+ if name in self.named_queries:
+ raise ValueError(f"Query name conflict: {name} has already been added.")
+
+ # Execute the query and save each resulting document
+ _read_time = self._save_documents_from_query(query, query_name=name)
+
+ # Actually save the query to our local object cache
+ self._save_named_query(name, query, _read_time)
+ self._reset_metadata()
+ return self
+
+ def _save_documents_from_query(
+ self, query: BaseQuery, query_name: str
+ ) -> datetime.datetime:
+ _read_time = datetime.datetime.min.replace(tzinfo=UTC)
+ if isinstance(query, AsyncQuery):
+ import asyncio
+
+ loop = asyncio.get_event_loop()
+ return loop.run_until_complete(self._process_async_query(query, query_name))
+
+ # `query` is now known to be a non-async `BaseQuery`
+ doc: DocumentSnapshot
+ for doc in query.stream(): # type: ignore
+ self.add_document(doc)
+ bundled_document = self.documents.get(doc.reference._document_path)
+ bundled_document.metadata.queries.append(query_name) # type: ignore
+ _read_time = doc.read_time
+ return _read_time
+
+ def _save_named_query(
+ self, name: str, query: BaseQuery, read_time: datetime.datetime,
+ ) -> None:
+ self.named_queries[name] = self._build_named_query(
+ name=name, snapshot=query, read_time=read_time,
+ )
+ self._update_last_read_time(read_time)
+
+ async def _process_async_query(
+ self, snapshot: AsyncQuery, query_name: str,
+ ) -> datetime.datetime:
+ doc: DocumentSnapshot
+ _read_time = datetime.datetime.min.replace(tzinfo=UTC)
+ async for doc in snapshot.stream():
+ self.add_document(doc)
+ bundled_document = self.documents.get(doc.reference._document_path)
+ bundled_document.metadata.queries.append(query_name) # type: ignore
+ _read_time = doc.read_time
+ return _read_time
+
+ def _build_named_query(
+ self, name: str, snapshot: BaseQuery, read_time: datetime.datetime,
+ ) -> NamedQuery:
+ return NamedQuery(
+ name=name,
+ bundled_query=BundledQuery(
+ parent=name,
+ structured_query=snapshot._to_protobuf()._pb,
+ limit_type=limit_type_of_query(snapshot),
+ ),
+ read_time=_helpers.build_timestamp(read_time),
+ )
+
+ def _update_last_read_time(
+ self, read_time: Union[datetime.datetime, Timestamp]
+ ) -> None:
+ _ts: Timestamp = (
+ read_time
+ if isinstance(read_time, Timestamp)
+ else _datetime_to_pb_timestamp(read_time)
+ )
+
+ # if `_ts` is greater than `self.latest_read_time`
+ if _helpers.compare_timestamps(_ts, self.latest_read_time) == 1:
+ self.latest_read_time = _ts
+
+ def _add_bundle_element(self, bundle_element: BundleElement, *, client: BaseClient, type: str): # type: ignore
+ """Applies BundleElements to this FirestoreBundle instance as a part of
+ deserializing a FirestoreBundle string.
+ """
+ from google.cloud.firestore_v1.types.document import Document
+
+ if getattr(self, "_doc_metadata_map", None) is None:
+ self._doc_metadata_map = {}
+ if type == "metadata":
+ self._deserialized_metadata = bundle_element.metadata # type: ignore
+ elif type == "namedQuery":
+ self.named_queries[bundle_element.named_query.name] = bundle_element.named_query # type: ignore
+ elif type == "documentMetadata":
+ self._doc_metadata_map[
+ bundle_element.document_metadata.name
+ ] = bundle_element.document_metadata
+ elif type == "document":
+ doc_ref_value = _helpers.DocumentReferenceValue(
+ bundle_element.document.name
+ )
+ snapshot = DocumentSnapshot(
+ data=_helpers.decode_dict(
+ Document(mapping=bundle_element.document).fields, client
+ ),
+ exists=True,
+ reference=DocumentReference(
+ doc_ref_value.collection_name,
+ doc_ref_value.document_id,
+ client=client,
+ ),
+ read_time=self._doc_metadata_map[
+ bundle_element.document.name
+ ].read_time,
+ create_time=bundle_element.document.create_time, # type: ignore
+ update_time=bundle_element.document.update_time, # type: ignore
+ )
+ self.add_document(snapshot)
+
+ bundled_document = self.documents.get(snapshot.reference._document_path)
+ for query_name in self._doc_metadata_map[
+ bundle_element.document.name
+ ].queries:
+ bundled_document.metadata.queries.append(query_name) # type: ignore
+ else:
+ raise ValueError(f"Unexpected type of BundleElement: {type}")
+
+ def build(self) -> str:
+ """Iterates over the bundle's stored documents and queries and produces
+ a single length-prefixed json string suitable for long-term storage.
+
+ Example:
+
+ from google.cloud import firestore
+
+ db = firestore.Client()
+ collection_ref = db.collection(u'users')
+
+ bundle = firestore.FirestoreBundle('my bundle')
+ bundle.add_named_query('app-users', collection_ref._query())
+
+ serialized_bundle: str = bundle.build()
+
+ # Now upload `serialized_bundle` to Google Cloud Storage, store it
+ # in Memorystore, or any other storage solution.
+
+ Returns:
+            str: The length-prefixed string representation of this bundle's
+ contents.
+ """
+ buffer: str = ""
+
+ named_query: NamedQuery
+ for named_query in self.named_queries.values():
+ buffer += self._compile_bundle_element(
+ BundleElement(named_query=named_query)
+ )
+
+ bundled_document: "_BundledDocument" # type: ignore
+ document_count: int = 0
+ for bundled_document in self.documents.values():
+ buffer += self._compile_bundle_element(
+ BundleElement(document_metadata=bundled_document.metadata)
+ )
+ document_count += 1
+ buffer += self._compile_bundle_element(
+ BundleElement(document=bundled_document.snapshot._to_protobuf()._pb,)
+ )
+
+ metadata: BundleElement = BundleElement(
+ metadata=self._deserialized_metadata
+ or BundleMetadata(
+ id=self.name,
+ create_time=_helpers.build_timestamp(),
+ version=FirestoreBundle.BUNDLE_SCHEMA_VERSION,
+ total_documents=document_count,
+ total_bytes=len(buffer.encode("utf-8")),
+ )
+ )
+ return f"{self._compile_bundle_element(metadata)}{buffer}"
+
+ def _compile_bundle_element(self, bundle_element: BundleElement) -> str:
+ serialized_be = json.dumps(json_format.MessageToDict(bundle_element._pb))
+ return f"{len(serialized_be)}{serialized_be}"
+
+ def _reset_metadata(self):
+        """Hydrating a bundle caches metadata that must be reset whenever
+        new queries or documents are added."""
+ self._deserialized_metadata = None
+
+
+class _BundledDocument:
+ """Convenience class to hold both the metadata and the actual content
+ of a document to be bundled."""
+
+ def __init__(
+ self, snapshot: DocumentSnapshot, metadata: BundledDocumentMetadata,
+ ) -> None:
+ self.snapshot = snapshot
+ self.metadata = metadata
diff --git a/google/cloud/firestore_bundle/py.typed b/google/cloud/firestore_bundle/py.typed
new file mode 100644
index 0000000000..e2987f2963
--- /dev/null
+++ b/google/cloud/firestore_bundle/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-cloud-bundle package uses inline types.
diff --git a/google/cloud/firestore_bundle/services/__init__.py b/google/cloud/firestore_bundle/services/__init__.py
new file mode 100644
index 0000000000..42ffdf2bc4
--- /dev/null
+++ b/google/cloud/firestore_bundle/services/__init__.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google/cloud/firestore_bundle/types/__init__.py b/google/cloud/firestore_bundle/types/__init__.py
new file mode 100644
index 0000000000..737862b173
--- /dev/null
+++ b/google/cloud/firestore_bundle/types/__init__.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .bundle import (
+ BundledDocumentMetadata,
+ BundledQuery,
+ BundleElement,
+ BundleMetadata,
+ NamedQuery,
+)
+
+__all__ = (
+ "BundledDocumentMetadata",
+ "BundledQuery",
+ "BundleElement",
+ "BundleMetadata",
+ "NamedQuery",
+)
diff --git a/google/cloud/firestore_bundle/types/bundle.py b/google/cloud/firestore_bundle/types/bundle.py
new file mode 100644
index 0000000000..3d78bfe00f
--- /dev/null
+++ b/google/cloud/firestore_bundle/types/bundle.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.cloud.firestore_v1.types import document as gfv_document
+from google.cloud.firestore_v1.types import query
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.firestore.bundle",
+ manifest={
+ "BundledQuery",
+ "NamedQuery",
+ "BundledDocumentMetadata",
+ "BundleMetadata",
+ "BundleElement",
+ },
+)
+
+
+class BundledQuery(proto.Message):
+ r"""Encodes a query saved in the bundle.
+
+ Attributes:
+ parent (str):
+ The parent resource name.
+ structured_query (google.firestore.v1.query_pb2.StructuredQuery):
+ A structured query.
+ limit_type (google.cloud.bundle.types.BundledQuery.LimitType):
+
+ """
+
+ class LimitType(proto.Enum):
+ r"""If the query is a limit query, should the limit be applied to
+ the beginning or the end of results.
+ """
+ FIRST = 0
+ LAST = 1
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ structured_query = proto.Field(
+ proto.MESSAGE, number=2, oneof="query_type", message=query.StructuredQuery,
+ )
+
+ limit_type = proto.Field(proto.ENUM, number=3, enum=LimitType,)
+
+
+class NamedQuery(proto.Message):
+ r"""A Query associated with a name, created as part of the bundle
+ file, and can be read by client SDKs once the bundle containing
+ them is loaded.
+
+ Attributes:
+ name (str):
+ Name of the query, such that client can use
+ the name to load this query from bundle, and
+ resume from when the query results are
+ materialized into this bundle.
+ bundled_query (google.cloud.bundle.types.BundledQuery):
+ The query saved in the bundle.
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
+ The read time of the query, when it is used
+ to build the bundle. This is useful to resume
+ the query from the bundle, once it is loaded by
+ client SDKs.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ bundled_query = proto.Field(proto.MESSAGE, number=2, message="BundledQuery",)
+
+ read_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,)
+
+
+class BundledDocumentMetadata(proto.Message):
+ r"""Metadata describing a Firestore document saved in the bundle.
+
+ Attributes:
+ name (str):
+ The document key of a bundled document.
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
+ The snapshot version of the document data
+ bundled.
+ exists (bool):
+ Whether the document exists.
+ queries (Sequence[str]):
+ The names of the queries in this bundle that
+ this document matches to.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ read_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
+
+ exists = proto.Field(proto.BOOL, number=3)
+
+ queries = proto.RepeatedField(proto.STRING, number=4)
+
+
+class BundleMetadata(proto.Message):
+ r"""Metadata describing the bundle file/stream.
+
+ Attributes:
+ id (str):
+ The ID of the bundle.
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
+ Time at which the documents snapshot is taken
+ for this bundle.
+ version (int):
+ The schema version of the bundle.
+ total_documents (int):
+ The number of documents in the bundle.
+ total_bytes (int):
+ The size of the bundle in bytes, excluding this
+ ``BundleMetadata``.
+ """
+
+ id = proto.Field(proto.STRING, number=1)
+
+ create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
+
+ version = proto.Field(proto.UINT32, number=3)
+
+ total_documents = proto.Field(proto.UINT32, number=4)
+
+ total_bytes = proto.Field(proto.UINT64, number=5)
+
+
+class BundleElement(proto.Message):
+ r"""A Firestore bundle is a length-prefixed stream of JSON
+ representations of ``BundleElement``. Only one ``BundleMetadata`` is
+ expected, and it should be the first element. The named queries
+ follow after ``metadata``. Every ``document_metadata`` is
+ immediately followed by a ``document``.
+
+ Attributes:
+ metadata (google.cloud.bundle.types.BundleMetadata):
+
+ named_query (google.cloud.bundle.types.NamedQuery):
+
+ document_metadata (google.cloud.bundle.types.BundledDocumentMetadata):
+
+ document (google.firestore.v1.document_pb2.Document):
+
+ """
+
+ metadata = proto.Field(
+ proto.MESSAGE, number=1, oneof="element_type", message="BundleMetadata",
+ )
+
+ named_query = proto.Field(
+ proto.MESSAGE, number=2, oneof="element_type", message="NamedQuery",
+ )
+
+ document_metadata = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ oneof="element_type",
+ message="BundledDocumentMetadata",
+ )
+
+ document = proto.Field(
+ proto.MESSAGE, number=4, oneof="element_type", message=gfv_document.Document,
+ )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/firestore_v1/_helpers.py b/google/cloud/firestore_v1/_helpers.py
index 89cf3b0025..aebdbee477 100644
--- a/google/cloud/firestore_v1/_helpers.py
+++ b/google/cloud/firestore_v1/_helpers.py
@@ -15,7 +15,9 @@
"""Common helpers shared across Google Cloud Firestore modules."""
import datetime
+import json
+import google
from google.api_core.datetime_helpers import DatetimeWithNanoseconds # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.protobuf import struct_pb2
@@ -32,7 +34,18 @@
from google.cloud.firestore_v1.types import common
from google.cloud.firestore_v1.types import document
from google.cloud.firestore_v1.types import write
-from typing import Any, Generator, List, NoReturn, Optional, Tuple, Union
+from google.protobuf.timestamp_pb2 import Timestamp # type: ignore
+from typing import (
+ Any,
+ Dict,
+ Generator,
+ Iterator,
+ List,
+ NoReturn,
+ Optional,
+ Tuple,
+ Union,
+)
_EmptyDict: transforms.Sentinel
_GRPC_ERROR_MAPPING: dict
@@ -219,6 +232,72 @@ def encode_dict(values_dict) -> dict:
return {key: encode_value(value) for key, value in values_dict.items()}
+def document_snapshot_to_protobuf(snapshot: "google.cloud.firestore_v1.base_document.DocumentSnapshot") -> Optional["google.cloud.firestore_v1.types.Document"]: # type: ignore
+ from google.cloud.firestore_v1.types import Document
+
+ if not snapshot.exists:
+ return None
+
+ return Document(
+ name=snapshot.reference._document_path,
+ fields=encode_dict(snapshot._data),
+ create_time=snapshot.create_time,
+ update_time=snapshot.update_time,
+ )
+
+
+class DocumentReferenceValue:
+ """DocumentReference path container with accessors for each relevant chunk.
+
+ Usage:
+ doc_ref_val = DocumentReferenceValue(
+ 'projects/my-proj/databases/(default)/documents/my-col/my-doc',
+ )
+ assert doc_ref_val.project_name == 'my-proj'
+ assert doc_ref_val.collection_name == 'my-col'
+ assert doc_ref_val.document_id == 'my-doc'
+ assert doc_ref_val.database_name == '(default)'
+
+ Raises:
+ ValueError: If the supplied value cannot satisfy a complete path.
+ """
+
+ def __init__(self, reference_value: str):
+ self._reference_value = reference_value
+
+ # The first 5 parts are
+ # projects, {project}, databases, {database}, documents
+ parts = reference_value.split(DOCUMENT_PATH_DELIMITER)
+ if len(parts) < 7:
+ msg = BAD_REFERENCE_ERROR.format(reference_value)
+ raise ValueError(msg)
+
+ self.project_name = parts[1]
+ self.collection_name = parts[5]
+ self.database_name = parts[3]
+ self.document_id = "/".join(parts[6:])
+
+ @property
+ def full_key(self) -> str:
+ """Computed property for a DocumentReference's collection_name and
+ document Id"""
+ return "/".join([self.collection_name, self.document_id])
+
+ @property
+ def full_path(self) -> str:
+ return self._reference_value or "/".join(
+ [
+ "projects",
+ self.project_name,
+ "databases",
+ self.database_name,
+ "documents",
+ self.collection_name,
+ self.document_id,
+ ]
+ )
+
+
def reference_value_to_document(reference_value, client) -> Any:
"""Convert a reference value string to a document.
@@ -237,15 +316,11 @@ def reference_value_to_document(reference_value, client) -> Any:
ValueError: If the ``reference_value`` does not come from the same
project / database combination as the ``client``.
"""
- # The first 5 parts are
- # projects, {project}, databases, {database}, documents
- parts = reference_value.split(DOCUMENT_PATH_DELIMITER, 5)
- if len(parts) != 6:
- msg = BAD_REFERENCE_ERROR.format(reference_value)
- raise ValueError(msg)
+ from google.cloud.firestore_v1.base_document import BaseDocumentReference
- # The sixth part is `a/b/c/d` (i.e. the document path)
- document = client.document(parts[-1])
+ doc_ref_value = DocumentReferenceValue(reference_value)
+
+ document: BaseDocumentReference = client.document(doc_ref_value.full_key)
if document._document_path != reference_value:
msg = WRONG_APP_REFERENCE.format(reference_value, client._database_string)
raise ValueError(msg)
@@ -1041,3 +1116,179 @@ def make_retry_timeout_kwargs(retry, timeout) -> dict:
kwargs["timeout"] = timeout
return kwargs
+
+
+def build_timestamp(
+ dt: Optional[Union[DatetimeWithNanoseconds, datetime.datetime]] = None
+) -> Timestamp:
+ """Returns the supplied datetime (or "now") as a Timestamp"""
+ return _datetime_to_pb_timestamp(dt or DatetimeWithNanoseconds.utcnow())
+
+
+def compare_timestamps(
+ ts1: Union[Timestamp, datetime.datetime], ts2: Union[Timestamp, datetime.datetime],
+) -> int:
+ ts1 = build_timestamp(ts1) if not isinstance(ts1, Timestamp) else ts1
+ ts2 = build_timestamp(ts2) if not isinstance(ts2, Timestamp) else ts2
+ ts1_nanos = ts1.nanos + ts1.seconds * 1e9
+ ts2_nanos = ts2.nanos + ts2.seconds * 1e9
+ if ts1_nanos == ts2_nanos:
+ return 0
+ return 1 if ts1_nanos > ts2_nanos else -1
+
+
+def deserialize_bundle(
+ serialized: Union[str, bytes],
+ client: "google.cloud.firestore_v1.client.BaseClient", # type: ignore
+) -> "google.cloud.firestore_bundle.FirestoreBundle": # type: ignore
+ """Inverse operation to a `FirestoreBundle` instance's `build()` method.
+
+ Args:
+ serialized (Union[str, bytes]): The result of `FirestoreBundle.build()`.
+ Should be a list of dictionaries in string format.
+ client (BaseClient): A connected Client instance.
+
+ Returns:
+ FirestoreBundle: A bundle equivalent to that which called `build()` and
+ initially created the `serialized` value.
+
+ Raises:
+ ValueError: If any of the dictionaries in the list contain any more than
+ one top-level key.
+ ValueError: If any unexpected BundleElement types are encountered.
+ ValueError: If the serialized bundle ends before expected.
+ """
+ from google.cloud.firestore_bundle import BundleElement, FirestoreBundle
+
+ # Outlines the legal transitions from one BundleElement to another.
+ bundle_state_machine = {
+ "__initial__": ["metadata"],
+ "metadata": ["namedQuery", "documentMetadata", "__end__"],
+ "namedQuery": ["namedQuery", "documentMetadata", "__end__"],
+ "documentMetadata": ["document"],
+ "document": ["documentMetadata", "__end__"],
+ }
+ allowed_next_element_types: List[str] = bundle_state_machine["__initial__"]
+
+ # This must be saved and added last, since we cache it to preserve timestamps,
+ # yet must flush it whenever a new document or query is added to a bundle.
+ # The process of deserializing a bundle uses these methods which flush a
+ # cached metadata element, and thus, it must be the last BundleElement
+ # added during deserialization.
+ metadata_bundle_element: Optional[BundleElement] = None
+
+ bundle: Optional[FirestoreBundle] = None
+ data: Dict
+ for data in _parse_bundle_elements_data(serialized):
+
+ # BundleElements are serialized as JSON containing one key outlining
+ # the type, with all further data nested under that key
+ keys: List[str] = list(data.keys())
+
+ if len(keys) != 1:
+ raise ValueError("Expected serialized BundleElement with one top-level key")
+
+ key: str = keys[0]
+
+ if key not in allowed_next_element_types:
+ raise ValueError(
+ f"Encountered BundleElement of type {key}. "
+ f"Expected one of {allowed_next_element_types}"
+ )
+
+ # Create and add our BundleElement
+ bundle_element: BundleElement
+ try:
+ bundle_element: BundleElement = BundleElement.from_json(json.dumps(data)) # type: ignore
+ except AttributeError as e:
+ # Some bad serialization formats cannot be universally deserialized.
+ if e.args[0] == "'dict' object has no attribute 'find'":
+ raise ValueError(
+ "Invalid serialization of datetimes. "
+ "Cannot deserialize Bundles created from the NodeJS SDK."
+ )
+ raise e # pragma: NO COVER
+
+ if bundle is None:
+ # This must be the first bundle type encountered
+ assert key == "metadata"
+ bundle = FirestoreBundle(data[key]["id"])
+ metadata_bundle_element = bundle_element
+
+ else:
+ bundle._add_bundle_element(bundle_element, client=client, type=key)
+
+ # Update the allowed next BundleElement types
+ allowed_next_element_types = bundle_state_machine[key]
+
+ if "__end__" not in allowed_next_element_types:
+ raise ValueError("Unexpected end to serialized FirestoreBundle")
+
+ # Now, finally add the metadata element
+ bundle._add_bundle_element(
+ metadata_bundle_element, client=client, type="metadata", # type: ignore
+ )
+
+ return bundle
+
+
+def _parse_bundle_elements_data(serialized: Union[str, bytes]) -> Generator[Dict, None, None]: # type: ignore
+ """Reads through a serialized FirestoreBundle and yields JSON chunks that
+ were created via `BundleElement.to_json(bundle_element)`.
+
+ Serialized FirestoreBundle instances are length-prefixed JSON objects, and
+ so are of the form "123{...}57{...}"
+ To correctly and safely read a bundle, we must first detect these length
+ prefixes, read that many bytes of data, and attempt to JSON-parse that.
+
+ Raises:
+ ValueError: If a chunk of JSON ever starts without following a length
+ prefix.
+ """
+ _serialized: Iterator[int] = iter(
+ serialized if isinstance(serialized, bytes) else serialized.encode("utf-8")
+ )
+
+ length_prefix: str = ""
+ while True:
+ byte: Optional[int] = next(_serialized, None)
+
+ if byte is None:
+ return None
+
+ _str: str = chr(byte)
+ if _str.isnumeric():
+ length_prefix += _str
+ else:
+ if length_prefix == "":
+ raise ValueError("Expected length prefix")
+
+ _length_prefix = int(length_prefix)
+ length_prefix = ""
+ _bytes = bytearray([byte])
+ _counter = 1
+ while _counter < _length_prefix:
+ _bytes.append(next(_serialized))
+ _counter += 1
+
+ yield json.loads(_bytes.decode("utf-8"))
+
+
+def _get_documents_from_bundle(
+ bundle, *, query_name: Optional[str] = None
+) -> Generator["google.cloud.firestore.DocumentSnapshot", None, None]: # type: ignore
+ from google.cloud.firestore_bundle.bundle import _BundledDocument
+
+ bundled_doc: _BundledDocument
+ for bundled_doc in bundle.documents.values():
+ if query_name and query_name not in bundled_doc.metadata.queries:
+ continue
+ yield bundled_doc.snapshot
+
+
+def _get_document_from_bundle(
+ bundle, *, document_id: str,
+) -> Optional["google.cloud.firestore.DocumentSnapshot"]: # type: ignore
+ bundled_doc = bundle.documents.get(document_id)
+ if bundled_doc:
+ return bundled_doc.snapshot
diff --git a/google/cloud/firestore_v1/async_client.py b/google/cloud/firestore_v1/async_client.py
index 512025f242..8623f640d1 100644
--- a/google/cloud/firestore_v1/async_client.py
+++ b/google/cloud/firestore_v1/async_client.py
@@ -49,7 +49,7 @@
from google.cloud.firestore_v1.services.firestore.transports import (
grpc_asyncio as firestore_grpc_transport,
)
-from typing import Any, AsyncGenerator, Iterable, Tuple
+from typing import Any, AsyncGenerator, Iterable, List
class AsyncClient(BaseClient):
@@ -119,7 +119,7 @@ def _target(self):
"""
return self._target_helper(firestore_client.FirestoreAsyncClient)
- def collection(self, *collection_path: Tuple[str]) -> AsyncCollectionReference:
+ def collection(self, *collection_path: str) -> AsyncCollectionReference:
"""Get a reference to a collection.
For a top-level collection:
@@ -139,7 +139,7 @@ def collection(self, *collection_path: Tuple[str]) -> AsyncCollectionReference:
Sub-collections can be nested deeper in a similar fashion.
Args:
- collection_path (Tuple[str, ...]): Can either be
+ collection_path: Can either be
* A single ``/``-delimited path to a collection
* A tuple of collection path segments
@@ -172,7 +172,7 @@ def collection_group(self, collection_id: str) -> AsyncCollectionGroup:
"""
return AsyncCollectionGroup(self._get_collection_reference(collection_id))
- def document(self, *document_path: Tuple[str]) -> AsyncDocumentReference:
+ def document(self, *document_path: str) -> AsyncDocumentReference:
"""Get a reference to a document in a collection.
For a top-level document:
@@ -194,7 +194,7 @@ def document(self, *document_path: Tuple[str]) -> AsyncDocumentReference:
Documents in sub-collections can be nested deeper in a similar fashion.
Args:
- document_path (Tuple[str, ...]): Can either be
+ document_path: Can either be
* A single ``/``-delimited path to a document
* A tuple of document path segments
@@ -209,7 +209,7 @@ def document(self, *document_path: Tuple[str]) -> AsyncDocumentReference:
async def get_all(
self,
- references: list,
+ references: List[AsyncDocumentReference],
field_paths: Iterable[str] = None,
transaction=None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
diff --git a/google/cloud/firestore_v1/async_document.py b/google/cloud/firestore_v1/async_document.py
index 11dec64b0e..fa3a0b4814 100644
--- a/google/cloud/firestore_v1/async_document.py
+++ b/google/cloud/firestore_v1/async_document.py
@@ -13,23 +13,27 @@
# limitations under the License.
"""Classes for representing documents for the Google Cloud Firestore API."""
+import datetime
+import logging
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
+from google.cloud._helpers import _datetime_to_pb_timestamp # type: ignore
from google.cloud.firestore_v1.base_document import (
BaseDocumentReference,
DocumentSnapshot,
_first_write_result,
)
-
-from google.api_core import exceptions # type: ignore
from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1.types import write
-from google.protobuf import timestamp_pb2
+from google.protobuf.timestamp_pb2 import Timestamp
from typing import Any, AsyncGenerator, Coroutine, Iterable, Union
+logger = logging.getLogger(__name__)
+
+
class AsyncDocumentReference(BaseDocumentReference):
"""A reference to a document in a Firestore database.
@@ -289,7 +293,7 @@ async def delete(
option: _helpers.WriteOption = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
- ) -> timestamp_pb2.Timestamp:
+ ) -> Timestamp:
"""Delete the current document in the Firestore database.
Args:
@@ -353,31 +357,34 @@ async def get(
:attr:`create_time` attributes will all be ``None`` and
its :attr:`exists` attribute will be ``False``.
"""
- request, kwargs = self._prep_get(field_paths, transaction, retry, timeout)
+ from google.cloud.firestore_v1.base_client import _parse_batch_get
- firestore_api = self._client._firestore_api
- try:
- document_pb = await firestore_api.get_document(
- request=request, metadata=self._client._rpc_metadata, **kwargs,
+ request, kwargs = self._prep_batch_get(field_paths, transaction, retry, timeout)
+
+ response_iter = await self._client._firestore_api.batch_get_documents(
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
+ )
+
+ async for resp in response_iter:
+ # Immediate return as the iterator should only ever have one item.
+ return _parse_batch_get(
+ get_doc_response=resp,
+ reference_map={self._document_path: self},
+ client=self._client,
)
- except exceptions.NotFound:
- data = None
- exists = False
- create_time = None
- update_time = None
- else:
- data = _helpers.decode_dict(document_pb.fields, self._client)
- exists = True
- create_time = document_pb.create_time
- update_time = document_pb.update_time
+
+ logger.warning(
+ "`batch_get_documents` unexpectedly returned empty "
+ "stream. Expected one object.",
+ )
return DocumentSnapshot(
- reference=self,
- data=data,
- exists=exists,
- read_time=None, # No server read_time available
- create_time=create_time,
- update_time=update_time,
+ self,
+ None,
+ exists=False,
+ read_time=_datetime_to_pb_timestamp(datetime.datetime.now()),
+ create_time=None,
+ update_time=None,
)
async def collections(
diff --git a/google/cloud/firestore_v1/base_client.py b/google/cloud/firestore_v1/base_client.py
index 7b9b228674..b2af21e3f6 100644
--- a/google/cloud/firestore_v1/base_client.py
+++ b/google/cloud/firestore_v1/base_client.py
@@ -148,7 +148,7 @@ def _firestore_api_helper(self, transport, client_class, client_module) -> Any:
# We need this in order to set appropriate keepalive options.
if self._emulator_host is not None:
- channel = grpc.insecure_channel(self._emulator_host)
+ channel = self._emulator_channel(transport)
else:
channel = transport.create_channel(
self._target,
@@ -165,6 +165,53 @@ def _firestore_api_helper(self, transport, client_class, client_module) -> Any:
return self._firestore_api_internal
+ def _emulator_channel(self, transport):
+ """
+ Creates a channel using self._credentials in a similar way to grpc.secure_channel but
+ using grpc.local_channel_credentials() rather than grpc.ssl_channel_credentials() to allow easy connection
+ to a local firestore emulator. This allows local testing of firestore rules if the credentials have been
+ created from a signed custom token.
+
+ :return: grpc.Channel or grpc.aio.Channel
+ """
+ # TODO: Implement a special credentials type for emulator and use
+ # "transport.create_channel" to create gRPC channels once google-auth
+ # extends its allowed credentials types.
+ if "GrpcAsyncIOTransport" in str(transport.__name__):
+ return grpc.aio.secure_channel(
+ self._emulator_host, self._local_composite_credentials()
+ )
+ else:
+ return grpc.secure_channel(
+ self._emulator_host, self._local_composite_credentials()
+ )
+
+ def _local_composite_credentials(self):
+ """
+ Creates the credentials for the local emulator channel
+ :return: grpc.ChannelCredentials
+ """
+ credentials = google.auth.credentials.with_scopes_if_required(
+ self._credentials, None
+ )
+ request = google.auth.transport.requests.Request()
+
+ # Create the metadata plugin for inserting the authorization header.
+ metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin(
+ credentials, request
+ )
+
+ # Create a set of grpc.CallCredentials using the metadata plugin.
+ google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)
+
+ # Using the local_credentials to allow connection to emulator
+ local_credentials = grpc.local_channel_credentials()
+
+ # Combine the local credentials and the authorization credentials.
+ return grpc.composite_channel_credentials(
+ local_credentials, google_auth_credentials
+ )
+
def _target_helper(self, client_class) -> str:
"""Return the target (where the API is).
Eg. "firestore.googleapis.com"
@@ -272,7 +319,7 @@ def _document_path_helper(self, *document_path) -> List[str]:
return joined_path.split(_helpers.DOCUMENT_PATH_DELIMITER)
@staticmethod
- def field_path(*field_names: Tuple[str]) -> str:
+ def field_path(*field_names: str) -> str:
"""Create a **field path** from a list of nested field names.
A **field path** is a ``.``-delimited concatenation of the field
@@ -293,7 +340,7 @@ def field_path(*field_names: Tuple[str]) -> str:
``data['aa']['bb']['cc']``.
Args:
- field_names (Tuple[str, ...]): The list of field names.
+ field_names: The list of field names.
Returns:
str: The ``.``-delimited field path.
diff --git a/google/cloud/firestore_v1/base_collection.py b/google/cloud/firestore_v1/base_collection.py
index 956c4b4b15..a022e96ba7 100644
--- a/google/cloud/firestore_v1/base_collection.py
+++ b/google/cloud/firestore_v1/base_collection.py
@@ -183,6 +183,10 @@ def _prep_list_documents(
"collection_id": self.id,
"page_size": page_size,
"show_missing": True,
+ # list_documents returns an iterator of document references, which do not
+ # include any fields. To save on data transfer, we can set a field_path mask
+ # to include no fields
+ "mask": {"field_paths": None},
}
kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
@@ -284,12 +288,15 @@ def limit(self, count: int) -> BaseQuery:
def limit_to_last(self, count: int):
"""Create a limited to last query with this collection as parent.
+
.. note::
`limit` and `limit_to_last` are mutually exclusive.
Setting `limit_to_last` will drop previously set `limit`.
+
See
:meth:`~google.cloud.firestore_v1.query.Query.limit_to_last`
for more information on this method.
+
Args:
count (int): Maximum number of documents to return that
match the query.
diff --git a/google/cloud/firestore_v1/base_document.py b/google/cloud/firestore_v1/base_document.py
index 441a30b51a..32694ac472 100644
--- a/google/cloud/firestore_v1/base_document.py
+++ b/google/cloud/firestore_v1/base_document.py
@@ -18,6 +18,7 @@
from google.api_core import retry as retries # type: ignore
+from google.cloud.firestore_v1.types import Document
from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1 import field_path as field_path_module
from google.cloud.firestore_v1.types import common
@@ -25,7 +26,7 @@
# Types needed only for Type Hints
from google.cloud.firestore_v1.types import firestore
from google.cloud.firestore_v1.types import write
-from typing import Any, Dict, Iterable, NoReturn, Union, Tuple
+from typing import Any, Dict, Iterable, NoReturn, Optional, Union, Tuple
class BaseDocumentReference(object):
@@ -268,7 +269,7 @@ def delete(
) -> NoReturn:
raise NotImplementedError
- def _prep_get(
+ def _prep_batch_get(
self,
field_paths: Iterable[str] = None,
transaction=None,
@@ -285,7 +286,8 @@ def _prep_get(
mask = None
request = {
- "name": self._document_path,
+ "database": self._client._database_string,
+ "documents": [self._document_path],
"mask": mask,
"transaction": _helpers.get_transaction_id(transaction),
}
@@ -490,6 +492,9 @@ def to_dict(self) -> Union[Dict[str, Any], None]:
return None
return copy.deepcopy(self._data)
+ def _to_protobuf(self) -> Optional[Document]:
+ return _helpers.document_snapshot_to_protobuf(self)
+
def _get_document_path(client, path: Tuple[str]) -> str:
"""Convert a path tuple into a full path string.
diff --git a/google/cloud/firestore_v1/base_query.py b/google/cloud/firestore_v1/base_query.py
index 6e06719078..564483b5e6 100644
--- a/google/cloud/firestore_v1/base_query.py
+++ b/google/cloud/firestore_v1/base_query.py
@@ -33,7 +33,7 @@
from google.cloud.firestore_v1.types import Cursor
from google.cloud.firestore_v1.types import RunQueryResponse
from google.cloud.firestore_v1.order import Order
-from typing import Any, Dict, Iterable, NoReturn, Optional, Tuple, Union
+from typing import Any, Dict, Generator, Iterable, NoReturn, Optional, Tuple, Union
# Types needed only for Type Hints
from google.cloud.firestore_v1.base_document import DocumentSnapshot
@@ -370,9 +370,11 @@ def limit(self, count: int) -> "BaseQuery":
"""Limit a query to return at most `count` matching results.
If the current query already has a `limit` set, this will override it.
+
.. note::
`limit` and `limit_to_last` are mutually exclusive.
Setting `limit` will drop previously set `limit_to_last`.
+
Args:
count (int): Maximum number of documents to return that match
the query.
@@ -398,9 +400,11 @@ def limit_to_last(self, count: int) -> "BaseQuery":
"""Limit a query to return the last `count` matching results.
If the current query already has a `limit_to_last`
set, this will override it.
+
.. note::
`limit` and `limit_to_last` are mutually exclusive.
Setting `limit_to_last` will drop previously set `limit`.
+
Args:
count (int): Maximum number of documents to return that match
the query.
@@ -800,12 +804,11 @@ def _to_protobuf(self) -> StructuredQuery:
query_kwargs["offset"] = self._offset
if self._limit is not None:
query_kwargs["limit"] = wrappers_pb2.Int32Value(value=self._limit)
-
return query.StructuredQuery(**query_kwargs)
def get(
self, transaction=None, retry: retries.Retry = None, timeout: float = None,
- ) -> NoReturn:
+ ) -> Iterable[DocumentSnapshot]:
raise NotImplementedError
def _prep_stream(
@@ -830,7 +833,7 @@ def _prep_stream(
def stream(
self, transaction=None, retry: retries.Retry = None, timeout: float = None,
- ) -> NoReturn:
+ ) -> Generator[document.DocumentSnapshot, Any, None]:
raise NotImplementedError
def on_snapshot(self, callback) -> NoReturn:
diff --git a/google/cloud/firestore_v1/client.py b/google/cloud/firestore_v1/client.py
index 6ad5f76e64..20ef5055f3 100644
--- a/google/cloud/firestore_v1/client.py
+++ b/google/cloud/firestore_v1/client.py
@@ -44,7 +44,7 @@
from google.cloud.firestore_v1.services.firestore.transports import (
grpc as firestore_grpc_transport,
)
-from typing import Any, Generator, Iterable, Tuple
+from typing import Any, Generator, Iterable
# Types needed only for Type Hints
from google.cloud.firestore_v1.base_document import DocumentSnapshot
@@ -117,7 +117,7 @@ def _target(self):
"""
return self._target_helper(firestore_client.FirestoreClient)
- def collection(self, *collection_path: Tuple[str]) -> CollectionReference:
+ def collection(self, *collection_path: str) -> CollectionReference:
"""Get a reference to a collection.
For a top-level collection:
@@ -137,7 +137,7 @@ def collection(self, *collection_path: Tuple[str]) -> CollectionReference:
Sub-collections can be nested deeper in a similar fashion.
Args:
- collection_path (Tuple[str, ...]): Can either be
+ collection_path: Can either be
* A single ``/``-delimited path to a collection
* A tuple of collection path segments
@@ -170,7 +170,7 @@ def collection_group(self, collection_id: str) -> CollectionGroup:
"""
return CollectionGroup(self._get_collection_reference(collection_id))
- def document(self, *document_path: Tuple[str]) -> DocumentReference:
+ def document(self, *document_path: str) -> DocumentReference:
"""Get a reference to a document in a collection.
For a top-level document:
@@ -192,7 +192,7 @@ def document(self, *document_path: Tuple[str]) -> DocumentReference:
Documents in sub-collections can be nested deeper in a similar fashion.
Args:
- document_path (Tuple[str, ...]): Can either be
+ document_path: Can either be
* A single ``/``-delimited path to a document
* A tuple of document path segments
diff --git a/google/cloud/firestore_v1/document.py b/google/cloud/firestore_v1/document.py
index bdb5c7943b..bd1798a8a9 100644
--- a/google/cloud/firestore_v1/document.py
+++ b/google/cloud/firestore_v1/document.py
@@ -13,24 +13,28 @@
# limitations under the License.
"""Classes for representing documents for the Google Cloud Firestore API."""
+import datetime
+import logging
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
+from google.cloud._helpers import _datetime_to_pb_timestamp # type: ignore
from google.cloud.firestore_v1.base_document import (
BaseDocumentReference,
DocumentSnapshot,
_first_write_result,
)
-
-from google.api_core import exceptions # type: ignore
from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1.types import write
from google.cloud.firestore_v1.watch import Watch
-from google.protobuf import timestamp_pb2
+from google.protobuf.timestamp_pb2 import Timestamp
from typing import Any, Callable, Generator, Iterable
+logger = logging.getLogger(__name__)
+
+
class DocumentReference(BaseDocumentReference):
"""A reference to a document in a Firestore database.
@@ -65,7 +69,14 @@ def create(
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
) -> write.WriteResult:
- """Create the current document in the Firestore database.
+ """Create a document in the Firestore database.
+
+ >>> document_data = {"a": 1, "b": {"c": "Two"}}
+ >>> document.get().to_dict() is None # does not exist
+ True
+ >>> document.create(document_data)
+ >>> document.get().to_dict() == document_data # exists
+ True
Args:
document_data (dict): Property names and values to use for
@@ -95,23 +106,51 @@ def set(
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
) -> write.WriteResult:
- """Replace the current document in the Firestore database.
+ """Create / replace / merge a document in the Firestore database.
+
+ - To "upsert" a document (create if it doesn't exist, replace completely
+ if it does), leave the ``merge`` argument at its default:
+
+ >>> document_data = {"a": 1, "b": {"c": "Two"}}
+ >>> document.get().to_dict() is None # document exists
+ False
+ >>> document.set(document_data)
+ >>> document.get().to_dict() == document_data # exists
+ True
+
+ - To "merge" ``document_data`` with an existing document (creating if
+ the document does not exist), pass ``merge`` as ``True``:
+
+ >>> document_data = {"a": 1, "b": {"c": "Two"}}
+ >>> document.get().to_dict() == {"d": "Three", "b": {}} # exists
+ >>> document.set(document_data, merge=True)
+ >>> document.get().to_dict() == {"a": 1, "d": "Three", "b": {"c": "Two"}}
+ True
+
+ In this case, existing documents with top-level keys which are
+ not present in ``document_data`` (``"d"``) will preserve the values
+ of those keys.
+
- A write ``option`` can be specified to indicate preconditions of
- the "set" operation. If no ``option`` is specified and this document
- doesn't exist yet, this method will create it.
+ - To merge only specific fields of ``document_data`` with existing
+ documents (creating if the document does not exist), pass ``merge``
+ as a list of field paths:
- Overwrites all content for the document with the fields in
- ``document_data``. This method performs almost the same functionality
- as :meth:`create`. The only difference is that this method doesn't
- make any requirements on the existence of the document (unless
- ``option`` is used), whereas as :meth:`create` will fail if the
- document already exists.
+
+ >>> document_data = {"a": 1, "b": {"c": "Two"}}
+ >>> document.get().to_dict() == {"b": {"c": "One", "d": "Four" }} # exists
+ True
+ >>> document.set(document_data, merge=["b.c"])
+ >>> document.get().to_dict() == {"b": {"c": "Two", "d": "Four" }}
+ True
+
+ For more information on field paths, see
+ :meth:`~google.cloud.firestore_v1.base_client.BaseClient.field_path`.
Args:
document_data (dict): Property names and values to use for
replacing a document.
- merge (Optional[bool] or Optional[List]):
+ merge (Optional[bool] or Optional[List]):
If True, apply merging instead of overwriting the state
of the document.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
@@ -142,9 +181,9 @@ def update(
override these preconditions.
Each key in ``field_updates`` can either be a field name or a
- **field path** (For more information on **field paths**, see
- :meth:`~google.cloud.firestore_v1.client.Client.field_path`.) To
- illustrate this, consider a document with
+ **field path** (For more information on field paths, see
+ :meth:`~google.cloud.firestore_v1.base_client.BaseClient.field_path`.)
+ To illustrate this, consider a document with
.. code-block:: python
@@ -290,7 +329,7 @@ def delete(
option: _helpers.WriteOption = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
- ) -> timestamp_pb2.Timestamp:
+ ) -> Timestamp:
"""Delete the current document in the Firestore database.
Args:
@@ -354,31 +393,35 @@ def get(
:attr:`create_time` attributes will all be ``None`` and
its :attr:`exists` attribute will be ``False``.
"""
- request, kwargs = self._prep_get(field_paths, transaction, retry, timeout)
+ from google.cloud.firestore_v1.base_client import _parse_batch_get
+
+ request, kwargs = self._prep_batch_get(field_paths, transaction, retry, timeout)
- firestore_api = self._client._firestore_api
- try:
- document_pb = firestore_api.get_document(
- request=request, metadata=self._client._rpc_metadata, **kwargs,
+ response_iter = self._client._firestore_api.batch_get_documents(
+ request=request, metadata=self._client._rpc_metadata, **kwargs,
+ )
+
+ get_doc_response = next(response_iter, None)
+
+ if get_doc_response is not None:
+ return _parse_batch_get(
+ get_doc_response=get_doc_response,
+ reference_map={self._document_path: self},
+ client=self._client,
)
- except exceptions.NotFound:
- data = None
- exists = False
- create_time = None
- update_time = None
- else:
- data = _helpers.decode_dict(document_pb.fields, self._client)
- exists = True
- create_time = document_pb.create_time
- update_time = document_pb.update_time
+
+ logger.warning(
+ "`batch_get_documents` unexpectedly returned empty "
+ "stream. Expected one object.",
+ )
return DocumentSnapshot(
- reference=self,
- data=data,
- exists=exists,
- read_time=None, # No server read_time available
- create_time=create_time,
- update_time=update_time,
+ self,
+ None,
+ exists=False,
+ read_time=_datetime_to_pb_timestamp(datetime.datetime.now()),
+ create_time=None,
+ update_time=None,
)
def collections(
@@ -420,7 +463,7 @@ def on_snapshot(self, callback: Callable) -> Watch:
provided callback is run on the snapshot.
Args:
- callback(Callable[[:class:`~google.cloud.firestore.document.DocumentSnapshot`], NoneType]):
+ callback(Callable[[:class:`~google.cloud.firestore_v1.base_document.DocumentSnapshot`], NoneType]):
a callback to run when a change occurs
Example:
diff --git a/google/cloud/firestore_v1/query.py b/google/cloud/firestore_v1/query.py
index 1716999be4..aa2f5ad096 100644
--- a/google/cloud/firestore_v1/query.py
+++ b/google/cloud/firestore_v1/query.py
@@ -19,6 +19,7 @@
a more common way to create a query than direct usage of the constructor.
"""
+from google.cloud.firestore_v1.base_document import DocumentSnapshot
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
@@ -33,9 +34,7 @@
from google.cloud.firestore_v1 import document
from google.cloud.firestore_v1.watch import Watch
-from typing import Any
-from typing import Callable
-from typing import Generator
+from typing import Any, Callable, Generator, List
class Query(BaseQuery):
@@ -125,7 +124,7 @@ def get(
transaction=None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
- ) -> list:
+ ) -> List[DocumentSnapshot]:
"""Read the documents in the collection that match this query.
This sends a ``RunQuery`` RPC and returns a list of documents
diff --git a/google/cloud/firestore_v1/services/firestore/async_client.py b/google/cloud/firestore_v1/services/firestore/async_client.py
index 59d6568033..777f3784df 100644
--- a/google/cloud/firestore_v1/services/firestore/async_client.py
+++ b/google/cloud/firestore_v1/services/firestore/async_client.py
@@ -91,7 +91,36 @@ class FirestoreAsyncClient:
FirestoreClient.parse_common_location_path
)
- from_service_account_file = FirestoreClient.from_service_account_file
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ FirestoreAsyncClient: The constructed client.
+ """
+ return FirestoreClient.from_service_account_info.__func__(FirestoreAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ FirestoreAsyncClient: The constructed client.
+ """
+ return FirestoreClient.from_service_account_file.__func__(FirestoreAsyncClient, filename, *args, **kwargs) # type: ignore
+
from_service_account_json = from_service_account_file
@property
@@ -166,7 +195,7 @@ async def get_document(
r"""Gets a single document.
Args:
- request (:class:`~.firestore.GetDocumentRequest`):
+ request (:class:`google.cloud.firestore_v1.types.GetDocumentRequest`):
The request object. The request for
[Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument].
@@ -177,7 +206,7 @@ async def get_document(
sent along with the request as metadata.
Returns:
- ~.document.Document:
+ google.cloud.firestore_v1.types.Document:
A Firestore document.
Must not exceed 1 MiB - 4 bytes.
@@ -199,6 +228,7 @@ async def get_document(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -227,7 +257,7 @@ async def list_documents(
r"""Lists documents.
Args:
- request (:class:`~.firestore.ListDocumentsRequest`):
+ request (:class:`google.cloud.firestore_v1.types.ListDocumentsRequest`):
The request object. The request for
[Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments].
@@ -238,7 +268,7 @@ async def list_documents(
sent along with the request as metadata.
Returns:
- ~.pagers.ListDocumentsAsyncPager:
+ google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsAsyncPager:
The response for
[Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments].
@@ -263,6 +293,7 @@ async def list_documents(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -299,17 +330,18 @@ async def update_document(
r"""Updates or inserts a document.
Args:
- request (:class:`~.firestore.UpdateDocumentRequest`):
+ request (:class:`google.cloud.firestore_v1.types.UpdateDocumentRequest`):
The request object. The request for
[Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument].
- document (:class:`~.gf_document.Document`):
+ document (:class:`google.cloud.firestore_v1.types.Document`):
Required. The updated document.
Creates the document if it does not
already exist.
+
This corresponds to the ``document`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- update_mask (:class:`~.common.DocumentMask`):
+ update_mask (:class:`google.cloud.firestore_v1.types.DocumentMask`):
The fields to update.
None of the field paths in the mask may
contain a reserved name.
@@ -319,6 +351,7 @@ async def update_document(
Fields referenced in the mask, but not
present in the input document, are
deleted from the document on the server.
+
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -330,7 +363,7 @@ async def update_document(
sent along with the request as metadata.
Returns:
- ~.gf_document.Document:
+ google.cloud.firestore_v1.types.Document:
A Firestore document.
Must not exceed 1 MiB - 4 bytes.
@@ -364,6 +397,7 @@ async def update_document(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -395,13 +429,14 @@ async def delete_document(
r"""Deletes a document.
Args:
- request (:class:`~.firestore.DeleteDocumentRequest`):
+ request (:class:`google.cloud.firestore_v1.types.DeleteDocumentRequest`):
The request object. The request for
[Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument].
name (:class:`str`):
Required. The resource name of the Document to delete.
In the format:
``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -443,6 +478,7 @@ async def delete_document(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -472,7 +508,7 @@ def batch_get_documents(
be returned in the same order that they were requested.
Args:
- request (:class:`~.firestore.BatchGetDocumentsRequest`):
+ request (:class:`google.cloud.firestore_v1.types.BatchGetDocumentsRequest`):
The request object. The request for
[Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments].
@@ -483,7 +519,7 @@ def batch_get_documents(
sent along with the request as metadata.
Returns:
- AsyncIterable[~.firestore.BatchGetDocumentsResponse]:
+ AsyncIterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]:
The streamed response for
[Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments].
@@ -505,6 +541,7 @@ def batch_get_documents(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=300.0,
),
default_timeout=300.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -534,12 +571,13 @@ async def begin_transaction(
r"""Starts a new transaction.
Args:
- request (:class:`~.firestore.BeginTransactionRequest`):
+ request (:class:`google.cloud.firestore_v1.types.BeginTransactionRequest`):
The request object. The request for
[Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction].
database (:class:`str`):
Required. The database name. In the format:
``projects/{project_id}/databases/{database_id}``.
+
This corresponds to the ``database`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -551,7 +589,7 @@ async def begin_transaction(
sent along with the request as metadata.
Returns:
- ~.firestore.BeginTransactionResponse:
+ google.cloud.firestore_v1.types.BeginTransactionResponse:
The response for
[Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction].
@@ -587,6 +625,7 @@ async def begin_transaction(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -618,18 +657,20 @@ async def commit(
documents.
Args:
- request (:class:`~.firestore.CommitRequest`):
+ request (:class:`google.cloud.firestore_v1.types.CommitRequest`):
The request object. The request for
[Firestore.Commit][google.firestore.v1.Firestore.Commit].
database (:class:`str`):
Required. The database name. In the format:
``projects/{project_id}/databases/{database_id}``.
+
This corresponds to the ``database`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- writes (:class:`Sequence[~.gf_write.Write]`):
+ writes (:class:`Sequence[google.cloud.firestore_v1.types.Write]`):
The writes to apply.
Always executed atomically and in order.
+
This corresponds to the ``writes`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -641,7 +682,7 @@ async def commit(
sent along with the request as metadata.
Returns:
- ~.firestore.CommitResponse:
+ google.cloud.firestore_v1.types.CommitResponse:
The response for
[Firestore.Commit][google.firestore.v1.Firestore.Commit].
@@ -676,6 +717,7 @@ async def commit(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -706,18 +748,20 @@ async def rollback(
r"""Rolls back a transaction.
Args:
- request (:class:`~.firestore.RollbackRequest`):
+ request (:class:`google.cloud.firestore_v1.types.RollbackRequest`):
The request object. The request for
[Firestore.Rollback][google.firestore.v1.Firestore.Rollback].
database (:class:`str`):
Required. The database name. In the format:
``projects/{project_id}/databases/{database_id}``.
+
This corresponds to the ``database`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
transaction (:class:`bytes`):
Required. The transaction to roll
back.
+
This corresponds to the ``transaction`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -761,6 +805,7 @@ async def rollback(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -788,7 +833,7 @@ def run_query(
r"""Runs a query.
Args:
- request (:class:`~.firestore.RunQueryRequest`):
+ request (:class:`google.cloud.firestore_v1.types.RunQueryRequest`):
The request object. The request for
[Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery].
@@ -799,7 +844,7 @@ def run_query(
sent along with the request as metadata.
Returns:
- AsyncIterable[~.firestore.RunQueryResponse]:
+ AsyncIterable[google.cloud.firestore_v1.types.RunQueryResponse]:
The response for
[Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery].
@@ -821,6 +866,7 @@ def run_query(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=300.0,
),
default_timeout=300.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -853,7 +899,7 @@ async def partition_query(
results.
Args:
- request (:class:`~.firestore.PartitionQueryRequest`):
+ request (:class:`google.cloud.firestore_v1.types.PartitionQueryRequest`):
The request object. The request for
[Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery].
@@ -864,7 +910,7 @@ async def partition_query(
sent along with the request as metadata.
Returns:
- ~.pagers.PartitionQueryAsyncPager:
+ google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryAsyncPager:
The response for
[Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery].
@@ -889,6 +935,7 @@ async def partition_query(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=300.0,
),
default_timeout=300.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -924,7 +971,7 @@ def write(
order.
Args:
- requests (AsyncIterator[`~.firestore.WriteRequest`]):
+ requests (AsyncIterator[`google.cloud.firestore_v1.types.WriteRequest`]):
The request object AsyncIterator. The request for
[Firestore.Write][google.firestore.v1.Firestore.Write].
The first request creates a stream, or resumes an
@@ -944,7 +991,7 @@ def write(
sent along with the request as metadata.
Returns:
- AsyncIterable[~.firestore.WriteResponse]:
+ AsyncIterable[google.cloud.firestore_v1.types.WriteResponse]:
The response for
[Firestore.Write][google.firestore.v1.Firestore.Write].
@@ -979,7 +1026,7 @@ def listen(
r"""Listens to changes.
Args:
- requests (AsyncIterator[`~.firestore.ListenRequest`]):
+ requests (AsyncIterator[`google.cloud.firestore_v1.types.ListenRequest`]):
The request object AsyncIterator. A request for
[Firestore.Listen][google.firestore.v1.Firestore.Listen]
retry (google.api_core.retry.Retry): Designation of what errors, if any,
@@ -989,7 +1036,7 @@ def listen(
sent along with the request as metadata.
Returns:
- AsyncIterable[~.firestore.ListenResponse]:
+ AsyncIterable[google.cloud.firestore_v1.types.ListenResponse]:
The response for
[Firestore.Listen][google.firestore.v1.Firestore.Listen].
@@ -1008,6 +1055,7 @@ def listen(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=86400.0,
),
default_timeout=86400.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -1035,7 +1083,7 @@ async def list_collection_ids(
r"""Lists all the collection IDs underneath a document.
Args:
- request (:class:`~.firestore.ListCollectionIdsRequest`):
+ request (:class:`google.cloud.firestore_v1.types.ListCollectionIdsRequest`):
The request object. The request for
[Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].
parent (:class:`str`):
@@ -1043,6 +1091,7 @@ async def list_collection_ids(
``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
For example:
``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -1054,7 +1103,7 @@ async def list_collection_ids(
sent along with the request as metadata.
Returns:
- ~.pagers.ListCollectionIdsAsyncPager:
+ google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsAsyncPager:
The response from
[Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].
@@ -1093,6 +1142,7 @@ async def list_collection_ids(
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -1137,7 +1187,7 @@ async def batch_write(
[Commit][google.firestore.v1.Firestore.Commit] instead.
Args:
- request (:class:`~.firestore.BatchWriteRequest`):
+ request (:class:`google.cloud.firestore_v1.types.BatchWriteRequest`):
The request object. The request for
[Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].
@@ -1148,7 +1198,7 @@ async def batch_write(
sent along with the request as metadata.
Returns:
- ~.firestore.BatchWriteResponse:
+ google.cloud.firestore_v1.types.BatchWriteResponse:
The response from
[Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].
@@ -1168,6 +1218,7 @@ async def batch_write(
predicate=retries.if_exception_type(
exceptions.Aborted, exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -1196,7 +1247,7 @@ async def create_document(
r"""Creates a new document.
Args:
- request (:class:`~.firestore.CreateDocumentRequest`):
+ request (:class:`google.cloud.firestore_v1.types.CreateDocumentRequest`):
The request object. The request for
[Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument].
@@ -1207,7 +1258,7 @@ async def create_document(
sent along with the request as metadata.
Returns:
- ~.document.Document:
+ google.cloud.firestore_v1.types.Document:
A Firestore document.
Must not exceed 1 MiB - 4 bytes.
@@ -1225,6 +1276,7 @@ async def create_document(
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
diff --git a/google/cloud/firestore_v1/services/firestore/client.py b/google/cloud/firestore_v1/services/firestore/client.py
index 88355df987..bd451dc257 100644
--- a/google/cloud/firestore_v1/services/firestore/client.py
+++ b/google/cloud/firestore_v1/services/firestore/client.py
@@ -133,6 +133,22 @@ def _get_default_mtls_endpoint(api_endpoint):
DEFAULT_ENDPOINT
)
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ FirestoreClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -145,7 +161,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
kwargs: Additional arguments to pass to the constructor.
Returns:
- {@api.name}: The constructed client.
+ FirestoreClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
@@ -237,10 +253,10 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.FirestoreTransport]): The
+ transport (Union[str, FirestoreTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (client_options_lib.ClientOptions): Custom options for the
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
@@ -276,21 +292,17 @@ def __init__(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
- ssl_credentials = None
+ client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
- import grpc # type: ignore
-
- cert, key = client_options.client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
else:
- creds = SslCredentials()
- is_mtls = creds.is_mtls
- ssl_credentials = creds.ssl_credentials if is_mtls else None
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -333,7 +345,7 @@ def __init__(
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
- ssl_channel_credentials=ssl_credentials,
+ client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
@@ -349,7 +361,7 @@ def get_document(
r"""Gets a single document.
Args:
- request (:class:`~.firestore.GetDocumentRequest`):
+ request (google.cloud.firestore_v1.types.GetDocumentRequest):
The request object. The request for
[Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument].
@@ -360,7 +372,7 @@ def get_document(
sent along with the request as metadata.
Returns:
- ~.document.Document:
+ google.cloud.firestore_v1.types.Document:
A Firestore document.
Must not exceed 1 MiB - 4 bytes.
@@ -401,7 +413,7 @@ def list_documents(
r"""Lists documents.
Args:
- request (:class:`~.firestore.ListDocumentsRequest`):
+ request (google.cloud.firestore_v1.types.ListDocumentsRequest):
The request object. The request for
[Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments].
@@ -412,7 +424,7 @@ def list_documents(
sent along with the request as metadata.
Returns:
- ~.pagers.ListDocumentsPager:
+ google.cloud.firestore_v1.services.firestore.pagers.ListDocumentsPager:
The response for
[Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments].
@@ -464,17 +476,18 @@ def update_document(
r"""Updates or inserts a document.
Args:
- request (:class:`~.firestore.UpdateDocumentRequest`):
+ request (google.cloud.firestore_v1.types.UpdateDocumentRequest):
The request object. The request for
[Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument].
- document (:class:`~.gf_document.Document`):
+ document (google.cloud.firestore_v1.types.Document):
Required. The updated document.
Creates the document if it does not
already exist.
+
This corresponds to the ``document`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- update_mask (:class:`~.common.DocumentMask`):
+ update_mask (google.cloud.firestore_v1.types.DocumentMask):
The fields to update.
None of the field paths in the mask may
contain a reserved name.
@@ -484,6 +497,7 @@ def update_document(
Fields referenced in the mask, but not
present in the input document, are
deleted from the document on the server.
+
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -495,7 +509,7 @@ def update_document(
sent along with the request as metadata.
Returns:
- ~.gf_document.Document:
+ google.cloud.firestore_v1.types.Document:
A Firestore document.
Must not exceed 1 MiB - 4 bytes.
@@ -555,13 +569,14 @@ def delete_document(
r"""Deletes a document.
Args:
- request (:class:`~.firestore.DeleteDocumentRequest`):
+ request (google.cloud.firestore_v1.types.DeleteDocumentRequest):
The request object. The request for
[Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument].
- name (:class:`str`):
+ name (str):
Required. The resource name of the Document to delete.
In the format:
``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -623,7 +638,7 @@ def batch_get_documents(
be returned in the same order that they were requested.
Args:
- request (:class:`~.firestore.BatchGetDocumentsRequest`):
+ request (google.cloud.firestore_v1.types.BatchGetDocumentsRequest):
The request object. The request for
[Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments].
@@ -634,7 +649,7 @@ def batch_get_documents(
sent along with the request as metadata.
Returns:
- Iterable[~.firestore.BatchGetDocumentsResponse]:
+ Iterable[google.cloud.firestore_v1.types.BatchGetDocumentsResponse]:
The streamed response for
[Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments].
@@ -676,12 +691,13 @@ def begin_transaction(
r"""Starts a new transaction.
Args:
- request (:class:`~.firestore.BeginTransactionRequest`):
+ request (google.cloud.firestore_v1.types.BeginTransactionRequest):
The request object. The request for
[Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction].
- database (:class:`str`):
+ database (str):
Required. The database name. In the format:
``projects/{project_id}/databases/{database_id}``.
+
This corresponds to the ``database`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -693,7 +709,7 @@ def begin_transaction(
sent along with the request as metadata.
Returns:
- ~.firestore.BeginTransactionResponse:
+ google.cloud.firestore_v1.types.BeginTransactionResponse:
The response for
[Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction].
@@ -751,18 +767,20 @@ def commit(
documents.
Args:
- request (:class:`~.firestore.CommitRequest`):
+ request (google.cloud.firestore_v1.types.CommitRequest):
The request object. The request for
[Firestore.Commit][google.firestore.v1.Firestore.Commit].
- database (:class:`str`):
+ database (str):
Required. The database name. In the format:
``projects/{project_id}/databases/{database_id}``.
+
This corresponds to the ``database`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- writes (:class:`Sequence[~.gf_write.Write]`):
+ writes (Sequence[google.cloud.firestore_v1.types.Write]):
The writes to apply.
Always executed atomically and in order.
+
This corresponds to the ``writes`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -774,7 +792,7 @@ def commit(
sent along with the request as metadata.
Returns:
- ~.firestore.CommitResponse:
+ google.cloud.firestore_v1.types.CommitResponse:
The response for
[Firestore.Commit][google.firestore.v1.Firestore.Commit].
@@ -801,9 +819,8 @@ def commit(
if database is not None:
request.database = database
-
- if writes:
- request.writes.extend(writes)
+ if writes is not None:
+ request.writes = writes
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -834,18 +851,20 @@ def rollback(
r"""Rolls back a transaction.
Args:
- request (:class:`~.firestore.RollbackRequest`):
+ request (google.cloud.firestore_v1.types.RollbackRequest):
The request object. The request for
[Firestore.Rollback][google.firestore.v1.Firestore.Rollback].
- database (:class:`str`):
+ database (str):
Required. The database name. In the format:
``projects/{project_id}/databases/{database_id}``.
+
This corresponds to the ``database`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- transaction (:class:`bytes`):
+ transaction (bytes):
Required. The transaction to roll
back.
+
This corresponds to the ``transaction`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -907,7 +926,7 @@ def run_query(
r"""Runs a query.
Args:
- request (:class:`~.firestore.RunQueryRequest`):
+ request (google.cloud.firestore_v1.types.RunQueryRequest):
The request object. The request for
[Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery].
@@ -918,7 +937,7 @@ def run_query(
sent along with the request as metadata.
Returns:
- Iterable[~.firestore.RunQueryResponse]:
+ Iterable[google.cloud.firestore_v1.types.RunQueryResponse]:
The response for
[Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery].
@@ -963,7 +982,7 @@ def partition_query(
results.
Args:
- request (:class:`~.firestore.PartitionQueryRequest`):
+ request (google.cloud.firestore_v1.types.PartitionQueryRequest):
The request object. The request for
[Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery].
@@ -974,7 +993,7 @@ def partition_query(
sent along with the request as metadata.
Returns:
- ~.pagers.PartitionQueryPager:
+ google.cloud.firestore_v1.services.firestore.pagers.PartitionQueryPager:
The response for
[Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery].
@@ -1025,7 +1044,7 @@ def write(
order.
Args:
- requests (Iterator[`~.firestore.WriteRequest`]):
+ requests (Iterator[google.cloud.firestore_v1.types.WriteRequest]):
The request object iterator. The request for
[Firestore.Write][google.firestore.v1.Firestore.Write].
The first request creates a stream, or resumes an
@@ -1045,7 +1064,7 @@ def write(
sent along with the request as metadata.
Returns:
- Iterable[~.firestore.WriteResponse]:
+ Iterable[google.cloud.firestore_v1.types.WriteResponse]:
The response for
[Firestore.Write][google.firestore.v1.Firestore.Write].
@@ -1076,7 +1095,7 @@ def listen(
r"""Listens to changes.
Args:
- requests (Iterator[`~.firestore.ListenRequest`]):
+ requests (Iterator[google.cloud.firestore_v1.types.ListenRequest]):
The request object iterator. A request for
[Firestore.Listen][google.firestore.v1.Firestore.Listen]
retry (google.api_core.retry.Retry): Designation of what errors, if any,
@@ -1086,7 +1105,7 @@ def listen(
sent along with the request as metadata.
Returns:
- Iterable[~.firestore.ListenResponse]:
+ Iterable[google.cloud.firestore_v1.types.ListenResponse]:
The response for
[Firestore.Listen][google.firestore.v1.Firestore.Listen].
@@ -1118,14 +1137,15 @@ def list_collection_ids(
r"""Lists all the collection IDs underneath a document.
Args:
- request (:class:`~.firestore.ListCollectionIdsRequest`):
+ request (google.cloud.firestore_v1.types.ListCollectionIdsRequest):
The request object. The request for
[Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].
- parent (:class:`str`):
+ parent (str):
Required. The parent document. In the format:
``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
For example:
``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -1137,7 +1157,7 @@ def list_collection_ids(
sent along with the request as metadata.
Returns:
- ~.pagers.ListCollectionIdsPager:
+ google.cloud.firestore_v1.services.firestore.pagers.ListCollectionIdsPager:
The response from
[Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds].
@@ -1211,7 +1231,7 @@ def batch_write(
[Commit][google.firestore.v1.Firestore.Commit] instead.
Args:
- request (:class:`~.firestore.BatchWriteRequest`):
+ request (google.cloud.firestore_v1.types.BatchWriteRequest):
The request object. The request for
[Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].
@@ -1222,7 +1242,7 @@ def batch_write(
sent along with the request as metadata.
Returns:
- ~.firestore.BatchWriteResponse:
+ google.cloud.firestore_v1.types.BatchWriteResponse:
The response from
[Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].
@@ -1263,7 +1283,7 @@ def create_document(
r"""Creates a new document.
Args:
- request (:class:`~.firestore.CreateDocumentRequest`):
+ request (google.cloud.firestore_v1.types.CreateDocumentRequest):
The request object. The request for
[Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument].
@@ -1274,7 +1294,7 @@ def create_document(
sent along with the request as metadata.
Returns:
- ~.document.Document:
+ google.cloud.firestore_v1.types.Document:
A Firestore document.
Must not exceed 1 MiB - 4 bytes.
diff --git a/google/cloud/firestore_v1/services/firestore/pagers.py b/google/cloud/firestore_v1/services/firestore/pagers.py
index 708ec0adef..8a74a14e45 100644
--- a/google/cloud/firestore_v1/services/firestore/pagers.py
+++ b/google/cloud/firestore_v1/services/firestore/pagers.py
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.firestore_v1.types import document
from google.cloud.firestore_v1.types import firestore
@@ -26,7 +35,7 @@ class ListDocumentsPager:
"""A pager for iterating through ``list_documents`` requests.
This class thinly wraps an initial
- :class:`~.firestore.ListDocumentsResponse` object, and
+ :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` object, and
provides an ``__iter__`` method to iterate through its
``documents`` field.
@@ -35,7 +44,7 @@ class ListDocumentsPager:
through the ``documents`` field on the
corresponding responses.
- All the usual :class:`~.firestore.ListDocumentsResponse`
+ All the usual :class:`google.cloud.firestore_v1.types.ListDocumentsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -53,9 +62,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.firestore.ListDocumentsRequest`):
+ request (google.cloud.firestore_v1.types.ListDocumentsRequest):
The initial request object.
- response (:class:`~.firestore.ListDocumentsResponse`):
+ response (google.cloud.firestore_v1.types.ListDocumentsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -88,7 +97,7 @@ class ListDocumentsAsyncPager:
"""A pager for iterating through ``list_documents`` requests.
This class thinly wraps an initial
- :class:`~.firestore.ListDocumentsResponse` object, and
+ :class:`google.cloud.firestore_v1.types.ListDocumentsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``documents`` field.
@@ -97,7 +106,7 @@ class ListDocumentsAsyncPager:
through the ``documents`` field on the
corresponding responses.
- All the usual :class:`~.firestore.ListDocumentsResponse`
+ All the usual :class:`google.cloud.firestore_v1.types.ListDocumentsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -115,9 +124,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.firestore.ListDocumentsRequest`):
+ request (google.cloud.firestore_v1.types.ListDocumentsRequest):
The initial request object.
- response (:class:`~.firestore.ListDocumentsResponse`):
+ response (google.cloud.firestore_v1.types.ListDocumentsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -154,7 +163,7 @@ class PartitionQueryPager:
"""A pager for iterating through ``partition_query`` requests.
This class thinly wraps an initial
- :class:`~.firestore.PartitionQueryResponse` object, and
+ :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` object, and
provides an ``__iter__`` method to iterate through its
``partitions`` field.
@@ -163,7 +172,7 @@ class PartitionQueryPager:
through the ``partitions`` field on the
corresponding responses.
- All the usual :class:`~.firestore.PartitionQueryResponse`
+ All the usual :class:`google.cloud.firestore_v1.types.PartitionQueryResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -181,9 +190,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.firestore.PartitionQueryRequest`):
+ request (google.cloud.firestore_v1.types.PartitionQueryRequest):
The initial request object.
- response (:class:`~.firestore.PartitionQueryResponse`):
+ response (google.cloud.firestore_v1.types.PartitionQueryResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -216,7 +225,7 @@ class PartitionQueryAsyncPager:
"""A pager for iterating through ``partition_query`` requests.
This class thinly wraps an initial
- :class:`~.firestore.PartitionQueryResponse` object, and
+ :class:`google.cloud.firestore_v1.types.PartitionQueryResponse` object, and
provides an ``__aiter__`` method to iterate through its
``partitions`` field.
@@ -225,7 +234,7 @@ class PartitionQueryAsyncPager:
through the ``partitions`` field on the
corresponding responses.
- All the usual :class:`~.firestore.PartitionQueryResponse`
+ All the usual :class:`google.cloud.firestore_v1.types.PartitionQueryResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -243,9 +252,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.firestore.PartitionQueryRequest`):
+ request (google.cloud.firestore_v1.types.PartitionQueryRequest):
The initial request object.
- response (:class:`~.firestore.PartitionQueryResponse`):
+ response (google.cloud.firestore_v1.types.PartitionQueryResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -282,7 +291,7 @@ class ListCollectionIdsPager:
"""A pager for iterating through ``list_collection_ids`` requests.
This class thinly wraps an initial
- :class:`~.firestore.ListCollectionIdsResponse` object, and
+ :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` object, and
provides an ``__iter__`` method to iterate through its
``collection_ids`` field.
@@ -291,7 +300,7 @@ class ListCollectionIdsPager:
through the ``collection_ids`` field on the
corresponding responses.
- All the usual :class:`~.firestore.ListCollectionIdsResponse`
+ All the usual :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -309,9 +318,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.firestore.ListCollectionIdsRequest`):
+ request (google.cloud.firestore_v1.types.ListCollectionIdsRequest):
The initial request object.
- response (:class:`~.firestore.ListCollectionIdsResponse`):
+ response (google.cloud.firestore_v1.types.ListCollectionIdsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -344,7 +353,7 @@ class ListCollectionIdsAsyncPager:
"""A pager for iterating through ``list_collection_ids`` requests.
This class thinly wraps an initial
- :class:`~.firestore.ListCollectionIdsResponse` object, and
+ :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``collection_ids`` field.
@@ -353,7 +362,7 @@ class ListCollectionIdsAsyncPager:
through the ``collection_ids`` field on the
corresponding responses.
- All the usual :class:`~.firestore.ListCollectionIdsResponse`
+ All the usual :class:`google.cloud.firestore_v1.types.ListCollectionIdsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -371,9 +380,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.firestore.ListCollectionIdsRequest`):
+ request (google.cloud.firestore_v1.types.ListCollectionIdsRequest):
The initial request object.
- response (:class:`~.firestore.ListCollectionIdsResponse`):
+ response (google.cloud.firestore_v1.types.ListCollectionIdsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
diff --git a/google/cloud/firestore_v1/services/firestore/transports/__init__.py b/google/cloud/firestore_v1/services/firestore/transports/__init__.py
index ce6aa3a9d1..11ecff7619 100644
--- a/google/cloud/firestore_v1/services/firestore/transports/__init__.py
+++ b/google/cloud/firestore_v1/services/firestore/transports/__init__.py
@@ -28,7 +28,6 @@
_transport_registry["grpc"] = FirestoreGrpcTransport
_transport_registry["grpc_asyncio"] = FirestoreGrpcAsyncIOTransport
-
__all__ = (
"FirestoreTransport",
"FirestoreGrpcTransport",
diff --git a/google/cloud/firestore_v1/services/firestore/transports/base.py b/google/cloud/firestore_v1/services/firestore/transports/base.py
index 6a0e3a7d36..8ae14a6298 100644
--- a/google/cloud/firestore_v1/services/firestore/transports/base.py
+++ b/google/cloud/firestore_v1/services/firestore/transports/base.py
@@ -73,10 +73,10 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
@@ -84,6 +84,9 @@ def __init__(
host += ":443"
self._host = host
+ # Save the scopes.
+ self._scopes = scopes or self.AUTH_SCOPES
+
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
@@ -93,20 +96,17 @@ def __init__(
if credentials_file is not None:
credentials, _ = auth.load_credentials_from_file(
- credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = auth.default(
- scopes=scopes, quota_project_id=quota_project_id
+ scopes=self._scopes, quota_project_id=quota_project_id
)
# Save the credentials.
self._credentials = credentials
- # Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages(client_info)
-
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
@@ -121,6 +121,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -136,6 +137,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -147,6 +149,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -162,6 +165,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -177,6 +181,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=300.0,
),
default_timeout=300.0,
client_info=client_info,
@@ -192,6 +197,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -203,6 +209,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -218,6 +225,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -233,6 +241,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=300.0,
),
default_timeout=300.0,
client_info=client_info,
@@ -248,6 +257,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=300.0,
),
default_timeout=300.0,
client_info=client_info,
@@ -266,6 +276,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=86400.0,
),
default_timeout=86400.0,
client_info=client_info,
@@ -281,6 +292,7 @@ def _prep_wrapped_messages(self, client_info):
exceptions.InternalServerError,
exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -294,6 +306,7 @@ def _prep_wrapped_messages(self, client_info):
predicate=retries.if_exception_type(
exceptions.Aborted, exceptions.ServiceUnavailable,
),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -305,6 +318,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc.py b/google/cloud/firestore_v1/services/firestore/transports/grpc.py
index 7e06e6321c..82aa10fba6 100644
--- a/google/cloud/firestore_v1/services/firestore/transports/grpc.py
+++ b/google/cloud/firestore_v1/services/firestore/transports/grpc.py
@@ -67,6 +67,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -97,6 +98,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -111,83 +116,70 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
- else:
- ssl_credentials = SslCredentials().ssl_credentials
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
- self._ssl_channel_credentials = ssl_credentials
else:
- host = host if ":" in host else host + ":443"
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
-
- self._stubs = {} # type: Dict[str, Callable]
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- # Run the base constructor.
+ # The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
)
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
@classmethod
def create_channel(
cls,
@@ -200,7 +192,7 @@ def create_channel(
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
- address (Optionsl[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py
index 9088560d77..40165168eb 100644
--- a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py
+++ b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py
@@ -71,7 +71,7 @@ def create_channel(
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
- address (Optional[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -111,6 +111,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -142,12 +143,16 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -156,82 +161,69 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
- else:
- ssl_credentials = SslCredentials().ssl_credentials
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
- self._ssl_channel_credentials = ssl_credentials
else:
- host = host if ":" in host else host + ":443"
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- )
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- # Run the base constructor.
+ # The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
)
- self._stubs = {}
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
diff --git a/google/cloud/firestore_v1/types/__init__.py b/google/cloud/firestore_v1/types/__init__.py
index 50f61964c8..a353384a95 100644
--- a/google/cloud/firestore_v1/types/__init__.py
+++ b/google/cloud/firestore_v1/types/__init__.py
@@ -21,97 +21,96 @@
TransactionOptions,
)
from .document import (
- Document,
- Value,
ArrayValue,
+ Document,
MapValue,
-)
-from .query import (
- StructuredQuery,
- Cursor,
-)
-from .write import (
- Write,
- DocumentTransform,
- WriteResult,
- DocumentChange,
- DocumentDelete,
- DocumentRemove,
- ExistenceFilter,
+ Value,
)
from .firestore import (
- GetDocumentRequest,
- ListDocumentsRequest,
- ListDocumentsResponse,
- CreateDocumentRequest,
- UpdateDocumentRequest,
- DeleteDocumentRequest,
BatchGetDocumentsRequest,
BatchGetDocumentsResponse,
+ BatchWriteRequest,
+ BatchWriteResponse,
BeginTransactionRequest,
BeginTransactionResponse,
CommitRequest,
CommitResponse,
+ CreateDocumentRequest,
+ DeleteDocumentRequest,
+ GetDocumentRequest,
+ ListCollectionIdsRequest,
+ ListCollectionIdsResponse,
+ ListDocumentsRequest,
+ ListDocumentsResponse,
+ ListenRequest,
+ ListenResponse,
+ PartitionQueryRequest,
+ PartitionQueryResponse,
RollbackRequest,
RunQueryRequest,
RunQueryResponse,
- PartitionQueryRequest,
- PartitionQueryResponse,
- WriteRequest,
- WriteResponse,
- ListenRequest,
- ListenResponse,
Target,
TargetChange,
- ListCollectionIdsRequest,
- ListCollectionIdsResponse,
- BatchWriteRequest,
- BatchWriteResponse,
+ UpdateDocumentRequest,
+ WriteRequest,
+ WriteResponse,
+)
+from .query import (
+ Cursor,
+ StructuredQuery,
+)
+from .write import (
+ DocumentChange,
+ DocumentDelete,
+ DocumentRemove,
+ DocumentTransform,
+ ExistenceFilter,
+ Write,
+ WriteResult,
)
-
__all__ = (
"DocumentMask",
"Precondition",
"TransactionOptions",
- "Document",
- "Value",
"ArrayValue",
+ "Document",
"MapValue",
- "StructuredQuery",
- "Cursor",
- "Write",
- "DocumentTransform",
- "WriteResult",
- "DocumentChange",
- "DocumentDelete",
- "DocumentRemove",
- "ExistenceFilter",
- "GetDocumentRequest",
- "ListDocumentsRequest",
- "ListDocumentsResponse",
- "CreateDocumentRequest",
- "UpdateDocumentRequest",
- "DeleteDocumentRequest",
+ "Value",
"BatchGetDocumentsRequest",
"BatchGetDocumentsResponse",
+ "BatchWriteRequest",
+ "BatchWriteResponse",
"BeginTransactionRequest",
"BeginTransactionResponse",
"CommitRequest",
"CommitResponse",
+ "CreateDocumentRequest",
+ "DeleteDocumentRequest",
+ "GetDocumentRequest",
+ "ListCollectionIdsRequest",
+ "ListCollectionIdsResponse",
+ "ListDocumentsRequest",
+ "ListDocumentsResponse",
+ "ListenRequest",
+ "ListenResponse",
+ "PartitionQueryRequest",
+ "PartitionQueryResponse",
"RollbackRequest",
"RunQueryRequest",
"RunQueryResponse",
- "PartitionQueryRequest",
- "PartitionQueryResponse",
- "WriteRequest",
- "WriteResponse",
- "ListenRequest",
- "ListenResponse",
"Target",
"TargetChange",
- "ListCollectionIdsRequest",
- "ListCollectionIdsResponse",
- "BatchWriteRequest",
- "BatchWriteResponse",
+ "UpdateDocumentRequest",
+ "WriteRequest",
+ "WriteResponse",
+ "Cursor",
+ "StructuredQuery",
+ "DocumentChange",
+ "DocumentDelete",
+ "DocumentRemove",
+ "DocumentTransform",
+ "ExistenceFilter",
+ "Write",
+ "WriteResult",
)
diff --git a/google/cloud/firestore_v1/types/common.py b/google/cloud/firestore_v1/types/common.py
index b03242a4a8..2fc5171d6c 100644
--- a/google/cloud/firestore_v1/types/common.py
+++ b/google/cloud/firestore_v1/types/common.py
@@ -52,7 +52,7 @@ class Precondition(proto.Message):
exists (bool):
When set to ``true``, the target document must exist. When
set to ``false``, the target document must not exist.
- update_time (~.timestamp.Timestamp):
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
When set, the target document must exist and
have been last updated at that time.
"""
@@ -68,10 +68,10 @@ class TransactionOptions(proto.Message):
r"""Options for creating a new transaction.
Attributes:
- read_only (~.common.TransactionOptions.ReadOnly):
+ read_only (google.cloud.firestore_v1.types.TransactionOptions.ReadOnly):
The transaction can only be used for read
operations.
- read_write (~.common.TransactionOptions.ReadWrite):
+ read_write (google.cloud.firestore_v1.types.TransactionOptions.ReadWrite):
The transaction can be used for both read and
write operations.
"""
@@ -92,7 +92,7 @@ class ReadOnly(proto.Message):
documents.
Attributes:
- read_time (~.timestamp.Timestamp):
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
Reads documents at the given time.
This may not be older than 60 seconds.
"""
diff --git a/google/cloud/firestore_v1/types/document.py b/google/cloud/firestore_v1/types/document.py
index 2f3b2759a6..26ecf45cf5 100644
--- a/google/cloud/firestore_v1/types/document.py
+++ b/google/cloud/firestore_v1/types/document.py
@@ -37,7 +37,7 @@ class Document(proto.Message):
name (str):
The resource name of the document, for example
``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- fields (Sequence[~.document.Document.FieldsEntry]):
+ fields (Sequence[google.cloud.firestore_v1.types.Document.FieldsEntry]):
The document's fields.
The map keys represent field names.
@@ -64,13 +64,13 @@ class Document(proto.Message):
characters, including :literal:`\``, must be escaped using a
``\``. For example, :literal:`\`x&y\`` represents ``x&y``
and :literal:`\`bak\`tik\`` represents :literal:`bak`tik`.
- create_time (~.timestamp.Timestamp):
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The time at which the document was created.
This value increases monotonically when a document is
deleted then recreated. It can also be compared to values
from other documents and the ``read_time`` of a query.
- update_time (~.timestamp.Timestamp):
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The time at which the document was last
changed.
@@ -93,7 +93,7 @@ class Value(proto.Message):
r"""A message that can hold any of the supported value types.
Attributes:
- null_value (~.struct.NullValue):
+ null_value (google.protobuf.struct_pb2.NullValue):
A null value.
boolean_value (bool):
A boolean value.
@@ -101,7 +101,7 @@ class Value(proto.Message):
An integer value.
double_value (float):
A double value.
- timestamp_value (~.timestamp.Timestamp):
+ timestamp_value (google.protobuf.timestamp_pb2.Timestamp):
A timestamp value.
Precise only to microseconds. When stored, any
additional precision is rounded down.
@@ -119,15 +119,15 @@ class Value(proto.Message):
reference_value (str):
A reference to a document. For example:
``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- geo_point_value (~.latlng.LatLng):
+ geo_point_value (google.type.latlng_pb2.LatLng):
A geo point value representing a point on the
surface of Earth.
- array_value (~.document.ArrayValue):
+ array_value (google.cloud.firestore_v1.types.ArrayValue):
An array value.
Cannot directly contain another array value,
though can contain an map which contains another
array.
- map_value (~.document.MapValue):
+ map_value (google.cloud.firestore_v1.types.MapValue):
A map value.
"""
@@ -168,7 +168,7 @@ class ArrayValue(proto.Message):
r"""An array value.
Attributes:
- values (Sequence[~.document.Value]):
+ values (Sequence[google.cloud.firestore_v1.types.Value]):
Values in the array.
"""
@@ -179,7 +179,7 @@ class MapValue(proto.Message):
r"""A map value.
Attributes:
- fields (Sequence[~.document.MapValue.FieldsEntry]):
+ fields (Sequence[google.cloud.firestore_v1.types.MapValue.FieldsEntry]):
The map's fields.
The map keys represent field names. Field names matching the
diff --git a/google/cloud/firestore_v1/types/firestore.py b/google/cloud/firestore_v1/types/firestore.py
index 345d67f709..78cfd5d7aa 100644
--- a/google/cloud/firestore_v1/types/firestore.py
+++ b/google/cloud/firestore_v1/types/firestore.py
@@ -69,7 +69,7 @@ class GetDocumentRequest(proto.Message):
Required. The resource name of the Document to get. In the
format:
``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- mask (~.common.DocumentMask):
+ mask (google.cloud.firestore_v1.types.DocumentMask):
The fields to return. If not set, returns all
fields.
If the document has a field that is not present
@@ -77,7 +77,7 @@ class GetDocumentRequest(proto.Message):
the response.
transaction (bytes):
Reads the document in a transaction.
- read_time (~.timestamp.Timestamp):
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
Reads the version of the document at the
given time. This may not be older than 270
seconds.
@@ -121,7 +121,7 @@ class ListDocumentsRequest(proto.Message):
order_by (str):
The order to sort results by. For example:
``priority desc, name``.
- mask (~.common.DocumentMask):
+ mask (google.cloud.firestore_v1.types.DocumentMask):
The fields to return. If not set, returns all
fields.
If a document has a field that is not present in
@@ -129,7 +129,7 @@ class ListDocumentsRequest(proto.Message):
the response.
transaction (bytes):
Reads documents in a transaction.
- read_time (~.timestamp.Timestamp):
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
Reads documents as they were at the given
time. This may not be older than 270 seconds.
show_missing (bool):
@@ -175,7 +175,7 @@ class ListDocumentsResponse(proto.Message):
[Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments].
Attributes:
- documents (Sequence[~.gf_document.Document]):
+ documents (Sequence[google.cloud.firestore_v1.types.Document]):
The Documents found.
next_page_token (str):
The next page token.
@@ -210,9 +210,9 @@ class CreateDocumentRequest(proto.Message):
this document.
Optional. If not specified, an ID will be
assigned by the service.
- document (~.gf_document.Document):
+ document (google.cloud.firestore_v1.types.Document):
Required. The document to create. ``name`` must not be set.
- mask (~.common.DocumentMask):
+ mask (google.cloud.firestore_v1.types.DocumentMask):
The fields to return. If not set, returns all
fields.
If the document has a field that is not present
@@ -236,11 +236,11 @@ class UpdateDocumentRequest(proto.Message):
[Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument].
Attributes:
- document (~.gf_document.Document):
+ document (google.cloud.firestore_v1.types.Document):
Required. The updated document.
Creates the document if it does not already
exist.
- update_mask (~.common.DocumentMask):
+ update_mask (google.cloud.firestore_v1.types.DocumentMask):
The fields to update.
None of the field paths in the mask may contain
a reserved name.
@@ -250,13 +250,13 @@ class UpdateDocumentRequest(proto.Message):
Fields referenced in the mask, but not present
in the input document, are deleted from the
document on the server.
- mask (~.common.DocumentMask):
+ mask (google.cloud.firestore_v1.types.DocumentMask):
The fields to return. If not set, returns all
fields.
If the document has a field that is not present
in this mask, that field will not be returned in
the response.
- current_document (~.common.Precondition):
+ current_document (google.cloud.firestore_v1.types.Precondition):
An optional precondition on the document.
The request will fail if this is set and not met
by the target document.
@@ -282,7 +282,7 @@ class DeleteDocumentRequest(proto.Message):
Required. The resource name of the Document to delete. In
the format:
``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- current_document (~.common.Precondition):
+ current_document (google.cloud.firestore_v1.types.Precondition):
An optional precondition on the document.
The request will fail if this is set and not met
by the target document.
@@ -309,7 +309,7 @@ class BatchGetDocumentsRequest(proto.Message):
The request will fail if any of the document is not a child
resource of the given ``database``. Duplicate names will be
elided.
- mask (~.common.DocumentMask):
+ mask (google.cloud.firestore_v1.types.DocumentMask):
The fields to return. If not set, returns all
fields.
If a document has a field that is not present in
@@ -317,12 +317,12 @@ class BatchGetDocumentsRequest(proto.Message):
the response.
transaction (bytes):
Reads documents in a transaction.
- new_transaction (~.common.TransactionOptions):
+ new_transaction (google.cloud.firestore_v1.types.TransactionOptions):
Starts a new transaction and reads the
documents. Defaults to a read-only transaction.
The new transaction ID will be returned as the
first response in the stream.
- read_time (~.timestamp.Timestamp):
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
Reads documents as they were at the given
time. This may not be older than 270 seconds.
"""
@@ -355,7 +355,7 @@ class BatchGetDocumentsResponse(proto.Message):
[Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments].
Attributes:
- found (~.gf_document.Document):
+ found (google.cloud.firestore_v1.types.Document):
A document that was requested.
missing (str):
A document name that was requested but does not exist. In
@@ -366,7 +366,7 @@ class BatchGetDocumentsResponse(proto.Message):
Will only be set in the first response, and only if
[BatchGetDocumentsRequest.new_transaction][google.firestore.v1.BatchGetDocumentsRequest.new_transaction]
was set in the request.
- read_time (~.timestamp.Timestamp):
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
The time at which the document was read. This may be
monotically increasing, in this case the previous documents
in the result stream are guaranteed not to have changed
@@ -392,7 +392,7 @@ class BeginTransactionRequest(proto.Message):
database (str):
Required. The database name. In the format:
``projects/{project_id}/databases/{database_id}``.
- options (~.common.TransactionOptions):
+ options (google.cloud.firestore_v1.types.TransactionOptions):
The options for the transaction.
Defaults to a read-write transaction.
"""
@@ -422,7 +422,7 @@ class CommitRequest(proto.Message):
database (str):
Required. The database name. In the format:
``projects/{project_id}/databases/{database_id}``.
- writes (Sequence[~.write.Write]):
+ writes (Sequence[google.cloud.firestore_v1.types.Write]):
The writes to apply.
Always executed atomically and in order.
transaction (bytes):
@@ -442,11 +442,11 @@ class CommitResponse(proto.Message):
[Firestore.Commit][google.firestore.v1.Firestore.Commit].
Attributes:
- write_results (Sequence[~.write.WriteResult]):
+ write_results (Sequence[google.cloud.firestore_v1.types.WriteResult]):
The result of applying the writes.
This i-th write result corresponds to the i-th
write in the request.
- commit_time (~.timestamp.Timestamp):
+ commit_time (google.protobuf.timestamp_pb2.Timestamp):
The time at which the commit occurred. Any read with an
equal or greater ``read_time`` is guaranteed to see the
effects of the commit.
@@ -489,16 +489,16 @@ class RunQueryRequest(proto.Message):
For example:
``projects/my-project/databases/my-database/documents`` or
``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
- structured_query (~.gf_query.StructuredQuery):
+ structured_query (google.cloud.firestore_v1.types.StructuredQuery):
A structured query.
transaction (bytes):
Reads documents in a transaction.
- new_transaction (~.common.TransactionOptions):
+ new_transaction (google.cloud.firestore_v1.types.TransactionOptions):
Starts a new transaction and reads the
documents. Defaults to a read-only transaction.
The new transaction ID will be returned as the
first response in the stream.
- read_time (~.timestamp.Timestamp):
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
Reads documents as they were at the given
time. This may not be older than 270 seconds.
"""
@@ -537,10 +537,10 @@ class RunQueryResponse(proto.Message):
[RunQueryRequest.new_transaction][google.firestore.v1.RunQueryRequest.new_transaction]
was set in the request. If set, no other fields will be set
in this response.
- document (~.gf_document.Document):
+ document (google.cloud.firestore_v1.types.Document):
A query result.
Not set when reporting partial progress.
- read_time (~.timestamp.Timestamp):
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
The time at which the document was read. This may be
monotonically increasing; in this case, the previous
documents in the result stream are guaranteed not to have
@@ -574,7 +574,7 @@ class PartitionQueryRequest(proto.Message):
``projects/{project_id}/databases/{database_id}/documents``.
Document resource names are not supported; only database
resource names can be specified.
- structured_query (~.gf_query.StructuredQuery):
+ structured_query (google.cloud.firestore_v1.types.StructuredQuery):
A structured query.
Query must specify collection with all
descendants and be ordered by name ascending.
@@ -639,7 +639,7 @@ class PartitionQueryResponse(proto.Message):
[Firestore.PartitionQuery][google.firestore.v1.Firestore.PartitionQuery].
Attributes:
- partitions (Sequence[~.gf_query.Cursor]):
+ partitions (Sequence[google.cloud.firestore_v1.types.Cursor]):
Partition results. Each partition is a split point that can
be used by RunQuery as a starting or end point for the query
results. The RunQuery requests must be made with the same
@@ -696,7 +696,7 @@ class WriteRequest(proto.Message):
The ID of the write stream to resume.
This may only be set in the first message. When
left empty, a new write stream will be created.
- writes (Sequence[~.write.Write]):
+ writes (Sequence[google.cloud.firestore_v1.types.Write]):
The writes to apply.
Always executed atomically and in order.
This must be empty on the first request.
@@ -719,7 +719,7 @@ class WriteRequest(proto.Message):
``stream_id`` field.
Leave this field unset when creating a new stream.
- labels (Sequence[~.firestore.WriteRequest.LabelsEntry]):
+ labels (Sequence[google.cloud.firestore_v1.types.WriteRequest.LabelsEntry]):
Labels associated with this write request.
"""
@@ -748,11 +748,11 @@ class WriteResponse(proto.Message):
response in the stream. This can be used by a
client to resume the stream at this point.
This field is always set.
- write_results (Sequence[~.write.WriteResult]):
+ write_results (Sequence[google.cloud.firestore_v1.types.WriteResult]):
The result of applying the writes.
This i-th write result corresponds to the i-th
write in the request.
- commit_time (~.timestamp.Timestamp):
+ commit_time (google.protobuf.timestamp_pb2.Timestamp):
The time at which the commit occurred. Any read with an
equal or greater ``read_time`` is guaranteed to see the
effects of the write.
@@ -777,12 +777,12 @@ class ListenRequest(proto.Message):
database (str):
Required. The database name. In the format:
``projects/{project_id}/databases/{database_id}``.
- add_target (~.firestore.Target):
+ add_target (google.cloud.firestore_v1.types.Target):
A target to add to this stream.
remove_target (int):
The ID of a target to remove from this
stream.
- labels (Sequence[~.firestore.ListenRequest.LabelsEntry]):
+ labels (Sequence[google.cloud.firestore_v1.types.ListenRequest.LabelsEntry]):
Labels associated with this target change.
"""
@@ -802,17 +802,17 @@ class ListenResponse(proto.Message):
[Firestore.Listen][google.firestore.v1.Firestore.Listen].
Attributes:
- target_change (~.firestore.TargetChange):
+ target_change (google.cloud.firestore_v1.types.TargetChange):
Targets have changed.
- document_change (~.write.DocumentChange):
+ document_change (google.cloud.firestore_v1.types.DocumentChange):
A [Document][google.firestore.v1.Document] has changed.
- document_delete (~.write.DocumentDelete):
+ document_delete (google.cloud.firestore_v1.types.DocumentDelete):
A [Document][google.firestore.v1.Document] has been deleted.
- document_remove (~.write.DocumentRemove):
+ document_remove (google.cloud.firestore_v1.types.DocumentRemove):
A [Document][google.firestore.v1.Document] has been removed
from a target (because it is no longer relevant to that
target).
- filter (~.write.ExistenceFilter):
+ filter (google.cloud.firestore_v1.types.ExistenceFilter):
A filter to apply to the set of documents
previously returned for the given target.
@@ -846,9 +846,9 @@ class Target(proto.Message):
r"""A specification of a set of documents to listen to.
Attributes:
- query (~.firestore.Target.QueryTarget):
+ query (google.cloud.firestore_v1.types.Target.QueryTarget):
A target specified by a query.
- documents (~.firestore.Target.DocumentsTarget):
+ documents (google.cloud.firestore_v1.types.Target.DocumentsTarget):
A target specified by a set of document
names.
resume_token (bytes):
@@ -858,7 +858,7 @@ class Target(proto.Message):
Using a resume token with a different target is unsupported
and may fail.
- read_time (~.timestamp.Timestamp):
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
Start listening after a specific ``read_time``.
The client must know the state of matching documents at this
@@ -898,7 +898,7 @@ class QueryTarget(proto.Message):
For example:
``projects/my-project/databases/my-database/documents`` or
``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom``
- structured_query (~.gf_query.StructuredQuery):
+ structured_query (google.cloud.firestore_v1.types.StructuredQuery):
A structured query.
"""
@@ -934,14 +934,14 @@ class TargetChange(proto.Message):
r"""Targets being watched have changed.
Attributes:
- target_change_type (~.firestore.TargetChange.TargetChangeType):
+ target_change_type (google.cloud.firestore_v1.types.TargetChange.TargetChangeType):
The type of change that occurred.
target_ids (Sequence[int]):
The target IDs of targets that have changed.
If empty, the change applies to all targets.
The order of the target IDs is not defined.
- cause (~.gr_status.Status):
+ cause (google.rpc.status_pb2.Status):
The error that resulted in this change, if
applicable.
resume_token (bytes):
@@ -949,7 +949,7 @@ class TargetChange(proto.Message):
``target_ids``, or all targets if ``target_ids`` is empty.
Not set on every target change.
- read_time (~.timestamp.Timestamp):
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
The consistent ``read_time`` for the given ``target_ids``
(omitted when the target_ids are not at a consistent
snapshot).
@@ -1036,13 +1036,13 @@ class BatchWriteRequest(proto.Message):
database (str):
Required. The database name. In the format:
``projects/{project_id}/databases/{database_id}``.
- writes (Sequence[~.write.Write]):
+ writes (Sequence[google.cloud.firestore_v1.types.Write]):
The writes to apply.
Method does not apply writes atomically and does
not guarantee ordering. Each write succeeds or
fails independently. You cannot write to the
same document more than once per request.
- labels (Sequence[~.firestore.BatchWriteRequest.LabelsEntry]):
+ labels (Sequence[google.cloud.firestore_v1.types.BatchWriteRequest.LabelsEntry]):
Labels associated with this batch write.
"""
@@ -1058,11 +1058,11 @@ class BatchWriteResponse(proto.Message):
[Firestore.BatchWrite][google.firestore.v1.Firestore.BatchWrite].
Attributes:
- write_results (Sequence[~.write.WriteResult]):
+ write_results (Sequence[google.cloud.firestore_v1.types.WriteResult]):
The result of applying the writes.
This i-th write result corresponds to the i-th
write in the request.
- status (Sequence[~.gr_status.Status]):
+ status (Sequence[google.rpc.status_pb2.Status]):
The status of applying the writes.
This i-th write status corresponds to the i-th
write in the request.
diff --git a/google/cloud/firestore_v1/types/query.py b/google/cloud/firestore_v1/types/query.py
index 8a65a3623a..2105e0d24a 100644
--- a/google/cloud/firestore_v1/types/query.py
+++ b/google/cloud/firestore_v1/types/query.py
@@ -31,13 +31,13 @@ class StructuredQuery(proto.Message):
r"""A Firestore query.
Attributes:
- select (~.query.StructuredQuery.Projection):
+ select (google.cloud.firestore_v1.types.StructuredQuery.Projection):
The projection to return.
- from_ (Sequence[~.query.StructuredQuery.CollectionSelector]):
+ from_ (Sequence[google.cloud.firestore_v1.types.StructuredQuery.CollectionSelector]):
The collections to query.
- where (~.query.StructuredQuery.Filter):
+ where (google.cloud.firestore_v1.types.StructuredQuery.Filter):
The filter to apply.
- order_by (Sequence[~.query.StructuredQuery.Order]):
+ order_by (Sequence[google.cloud.firestore_v1.types.StructuredQuery.Order]):
The order to apply to the query results.
Firestore guarantees a stable ordering through the following
@@ -59,15 +59,15 @@ class StructuredQuery(proto.Message):
``SELECT * FROM Foo ORDER BY A DESC, __name__ DESC``
- ``SELECT * FROM Foo WHERE A > 1`` becomes
``SELECT * FROM Foo WHERE A > 1 ORDER BY A, __name__``
- start_at (~.query.Cursor):
+ start_at (google.cloud.firestore_v1.types.Cursor):
A starting point for the query results.
- end_at (~.query.Cursor):
+ end_at (google.cloud.firestore_v1.types.Cursor):
A end point for the query results.
offset (int):
The number of results to skip.
Applies before limit, but after all other
constraints. Must be >= 0 if specified.
- limit (~.wrappers.Int32Value):
+ limit (google.protobuf.wrappers_pb2.Int32Value):
The maximum number of results to return.
Applies after all other constraints.
Must be >= 0 if specified.
@@ -101,11 +101,11 @@ class Filter(proto.Message):
r"""A filter.
Attributes:
- composite_filter (~.query.StructuredQuery.CompositeFilter):
+ composite_filter (google.cloud.firestore_v1.types.StructuredQuery.CompositeFilter):
A composite filter.
- field_filter (~.query.StructuredQuery.FieldFilter):
+ field_filter (google.cloud.firestore_v1.types.StructuredQuery.FieldFilter):
A filter on a document field.
- unary_filter (~.query.StructuredQuery.UnaryFilter):
+ unary_filter (google.cloud.firestore_v1.types.StructuredQuery.UnaryFilter):
A filter that takes exactly one argument.
"""
@@ -135,9 +135,9 @@ class CompositeFilter(proto.Message):
operator.
Attributes:
- op (~.query.StructuredQuery.CompositeFilter.Operator):
+ op (google.cloud.firestore_v1.types.StructuredQuery.CompositeFilter.Operator):
The operator for combining multiple filters.
- filters (Sequence[~.query.StructuredQuery.Filter]):
+ filters (Sequence[google.cloud.firestore_v1.types.StructuredQuery.Filter]):
The list of filters to combine.
Must contain at least one filter.
"""
@@ -159,11 +159,11 @@ class FieldFilter(proto.Message):
r"""A filter on a specific field.
Attributes:
- field (~.query.StructuredQuery.FieldReference):
+ field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference):
The field to filter by.
- op (~.query.StructuredQuery.FieldFilter.Operator):
+ op (google.cloud.firestore_v1.types.StructuredQuery.FieldFilter.Operator):
The operator to filter by.
- value (~.document.Value):
+ value (google.cloud.firestore_v1.types.Value):
The value to compare to.
"""
@@ -195,9 +195,9 @@ class UnaryFilter(proto.Message):
r"""A filter with a single operand.
Attributes:
- op (~.query.StructuredQuery.UnaryFilter.Operator):
+ op (google.cloud.firestore_v1.types.StructuredQuery.UnaryFilter.Operator):
The unary operator to apply.
- field (~.query.StructuredQuery.FieldReference):
+ field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference):
The field to which to apply the operator.
"""
@@ -224,9 +224,9 @@ class Order(proto.Message):
r"""An order on a field.
Attributes:
- field (~.query.StructuredQuery.FieldReference):
+ field (google.cloud.firestore_v1.types.StructuredQuery.FieldReference):
The field to order by.
- direction (~.query.StructuredQuery.Direction):
+ direction (google.cloud.firestore_v1.types.StructuredQuery.Direction):
The direction to order by. Defaults to ``ASCENDING``.
"""
@@ -250,7 +250,7 @@ class Projection(proto.Message):
r"""The projection of document's fields to return.
Attributes:
- fields (Sequence[~.query.StructuredQuery.FieldReference]):
+ fields (Sequence[google.cloud.firestore_v1.types.StructuredQuery.FieldReference]):
The fields to return.
If empty, all fields are returned. To only return the name
@@ -282,7 +282,7 @@ class Cursor(proto.Message):
r"""A position in a query result set.
Attributes:
- values (Sequence[~.document.Value]):
+ values (Sequence[google.cloud.firestore_v1.types.Value]):
The values that represent a position, in the
order they appear in the order by clause of a
query.
diff --git a/google/cloud/firestore_v1/types/write.py b/google/cloud/firestore_v1/types/write.py
index 6b3f49b530..06c715292e 100644
--- a/google/cloud/firestore_v1/types/write.py
+++ b/google/cloud/firestore_v1/types/write.py
@@ -41,14 +41,14 @@ class Write(proto.Message):
r"""A write on a document.
Attributes:
- update (~.gf_document.Document):
+ update (google.cloud.firestore_v1.types.Document):
A document to write.
delete (str):
A document name to delete. In the format:
``projects/{project_id}/databases/{database_id}/documents/{document_path}``.
- transform (~.write.DocumentTransform):
+ transform (google.cloud.firestore_v1.types.DocumentTransform):
Applies a transformation to a document.
- update_mask (~.common.DocumentMask):
+ update_mask (google.cloud.firestore_v1.types.DocumentMask):
The fields to update in this write.
This field can be set only when the operation is ``update``.
@@ -59,14 +59,14 @@ class Write(proto.Message):
the mask, but not present in the input document, are deleted
from the document on the server. The field paths in this
mask must not contain a reserved field name.
- update_transforms (Sequence[~.write.DocumentTransform.FieldTransform]):
+ update_transforms (Sequence[google.cloud.firestore_v1.types.DocumentTransform.FieldTransform]):
The transforms to perform after update.
This field can be set only when the operation is ``update``.
If present, this write is equivalent to performing
``update`` and ``transform`` to the same document atomically
and in order.
- current_document (~.common.Precondition):
+ current_document (google.cloud.firestore_v1.types.Precondition):
An optional precondition on the document.
The write will fail if this is set and not met
by the target document.
@@ -99,7 +99,7 @@ class DocumentTransform(proto.Message):
Attributes:
document (str):
The name of the document to transform.
- field_transforms (Sequence[~.write.DocumentTransform.FieldTransform]):
+ field_transforms (Sequence[google.cloud.firestore_v1.types.DocumentTransform.FieldTransform]):
The list of transformations to apply to the
fields of the document, in order.
This must not be empty.
@@ -113,9 +113,9 @@ class FieldTransform(proto.Message):
The path of the field. See
[Document.fields][google.firestore.v1.Document.fields] for
the field path syntax reference.
- set_to_server_value (~.write.DocumentTransform.FieldTransform.ServerValue):
+ set_to_server_value (google.cloud.firestore_v1.types.DocumentTransform.FieldTransform.ServerValue):
Sets the field to the given server value.
- increment (~.gf_document.Value):
+ increment (google.cloud.firestore_v1.types.Value):
Adds the given value to the field's current
value.
This must be an integer or a double value.
@@ -129,7 +129,7 @@ class FieldTransform(proto.Message):
there is positive/negative integer overflow, the
field is resolved to the largest magnitude
positive/negative integer.
- maximum (~.gf_document.Value):
+ maximum (google.cloud.firestore_v1.types.Value):
Sets the field to the maximum of its current
value and the given value.
This must be an integer or a double value.
@@ -146,7 +146,7 @@ class FieldTransform(proto.Message):
zero input value is always the stored value.
The maximum of any numeric value x and NaN is
NaN.
- minimum (~.gf_document.Value):
+ minimum (google.cloud.firestore_v1.types.Value):
Sets the field to the minimum of its current
value and the given value.
This must be an integer or a double value.
@@ -163,7 +163,7 @@ class FieldTransform(proto.Message):
zero input value is always the stored value.
The minimum of any numeric value x and NaN is
NaN.
- append_missing_elements (~.gf_document.ArrayValue):
+ append_missing_elements (google.cloud.firestore_v1.types.ArrayValue):
Append the given elements in order if they are not already
present in the current field value. If the field is not an
array, or if the field does not yet exist, it is first set
@@ -176,7 +176,7 @@ class FieldTransform(proto.Message):
considered.
The corresponding transform_result will be the null value.
- remove_all_from_array (~.gf_document.ArrayValue):
+ remove_all_from_array (google.cloud.firestore_v1.types.ArrayValue):
Remove all of the given elements from the array in the
field. If the field is not an array, or if the field does
not yet exist, it is set to the empty array.
@@ -241,13 +241,13 @@ class WriteResult(proto.Message):
r"""The result of applying a write.
Attributes:
- update_time (~.timestamp.Timestamp):
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
The last update time of the document after applying the
write. Not set after a ``delete``.
If the write did not actually change the document, this will
be the previous update_time.
- transform_results (Sequence[~.gf_document.Value]):
+ transform_results (Sequence[google.cloud.firestore_v1.types.Value]):
The results of applying each
[DocumentTransform.FieldTransform][google.firestore.v1.DocumentTransform.FieldTransform],
in the same order.
@@ -272,7 +272,7 @@ class DocumentChange(proto.Message):
targets are affected.
Attributes:
- document (~.gf_document.Document):
+ document (google.cloud.firestore_v1.types.Document):
The new state of the
[Document][google.firestore.v1.Document].
@@ -311,7 +311,7 @@ class DocumentDelete(proto.Message):
removed_target_ids (Sequence[int]):
A set of target IDs for targets that
previously matched this entity.
- read_time (~.timestamp.Timestamp):
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
The read timestamp at which the delete was observed.
Greater or equal to the ``commit_time`` of the delete.
@@ -344,7 +344,7 @@ class DocumentRemove(proto.Message):
removed_target_ids (Sequence[int]):
A set of target IDs for targets that
previously matched this document.
- read_time (~.timestamp.Timestamp):
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
The read timestamp at which the remove was observed.
Greater or equal to the ``commit_time`` of the
diff --git a/noxfile.py b/noxfile.py
index 23b817126c..db0b94b745 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -31,6 +31,17 @@
SYSTEM_TEST_PYTHON_VERSIONS = ["3.7"]
UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
+# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+nox.options.sessions = [
+ "unit",
+ "system",
+ "cover",
+ "lint",
+ "lint_setup_py",
+ "blacken",
+ "docs",
+]
+
@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
@@ -84,15 +95,16 @@ def default(session):
session.install(
"mock", "pytest", "pytest-cov",
)
+
session.install("-e", ".")
# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
- "--cov=google.cloud.firestore",
- "--cov=google.cloud",
- "--cov=tests.unit",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
+ "--cov=google/cloud",
+ "--cov=tests/unit",
"--cov-append",
"--cov-config=.coveragerc",
"--cov-report=",
@@ -120,6 +132,9 @@ def system(session):
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("FIRESTORE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
+ # Install pyopenssl for mTLS testing.
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+ session.install("pyopenssl")
system_test_exists = os.path.exists(system_test_path)
system_test_folder_exists = os.path.exists(system_test_folder_path)
@@ -139,9 +154,21 @@ def system(session):
# Run py.test against the system tests.
if system_test_exists:
- session.run("py.test", "--verbose", system_test_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ )
if system_test_folder_exists:
- session.run("py.test", "--verbose", system_test_folder_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ )
@nox.session(python=DEFAULT_PYTHON_VERSION)
diff --git a/renovate.json b/renovate.json
index 4fa949311b..f08bc22c9a 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,5 +1,6 @@
{
"extends": [
"config:base", ":preserveSemverRanges"
- ]
+ ],
+ "ignorePaths": [".pre-commit-config.yaml"]
}
diff --git a/setup.py b/setup.py
index 8f86c45297..e8356363b7 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
name = "google-cloud-firestore"
description = "Google Cloud Firestore API client library"
-version = "2.0.2"
+version = "2.1.0"
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
"google-api-core[grpc] >= 1.22.1, < 2.0.0dev",
diff --git a/synth.metadata b/synth.metadata
index cd18028657..9fb8e0f6d9 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,23 +3,23 @@
{
"git": {
"name": ".",
- "remote": "git@github.com:googleapis/python-firestore",
- "sha": "ab19546ee96c69f46519764a3fb0eb4bea4fc6f8"
+ "remote": "https://github.com/googleapis/python-firestore.git",
+ "sha": "e57258c51e4b4aa664cc927454056412756fc7ac"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "0c9e3f8cb3a0c75983fe9a7897f0ef048d81e999",
- "internalRef": "342123525"
+ "sha": "149a3a84c29c9b8189576c7442ccb6dcf6a8f95b",
+ "internalRef": "364411656"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "e89175cf074dccc4babb4eca66ae913696e47a71"
+ "sha": "2c54c473779ea731128cea61a3a6c975a08a5378"
}
}
],
@@ -41,6 +41,15 @@
"language": "python",
"generator": "bazel"
}
+ },
+ {
+ "client": {
+ "source": "googleapis",
+ "apiName": "firestore-bundle",
+ "apiVersion": "v1",
+ "language": "python",
+ "generator": "bazel"
+ }
}
],
"generatedFiles": [
@@ -51,6 +60,7 @@
".github/ISSUE_TEMPLATE/feature_request.md",
".github/ISSUE_TEMPLATE/support_request.md",
".github/PULL_REQUEST_TEMPLATE.md",
+ ".github/header-checker-lint.yml",
".github/release-please.yml",
".github/snippet-bot.yml",
".gitignore",
@@ -88,6 +98,7 @@
".kokoro/test-samples.sh",
".kokoro/trampoline.sh",
".kokoro/trampoline_v2.sh",
+ ".pre-commit-config.yaml",
".trampolinerc",
"CODE_OF_CONDUCT.md",
"CONTRIBUTING.rst",
@@ -114,6 +125,11 @@
"google/cloud/firestore_admin_v1/types/index.py",
"google/cloud/firestore_admin_v1/types/location.py",
"google/cloud/firestore_admin_v1/types/operation.py",
+ "google/cloud/firestore_bundle/__init__.py",
+ "google/cloud/firestore_bundle/py.typed",
+ "google/cloud/firestore_bundle/services/__init__.py",
+ "google/cloud/firestore_bundle/types/__init__.py",
+ "google/cloud/firestore_bundle/types/bundle.py",
"google/cloud/firestore_v1/py.typed",
"google/cloud/firestore_v1/services/__init__.py",
"google/cloud/firestore_v1/services/firestore/__init__.py",
@@ -145,6 +161,7 @@
"scripts/readme-gen/templates/install_portaudio.tmpl.rst",
"setup.cfg",
"testing/.gitignore",
+ "tests/unit/gapic/bundle/__init__.py",
"tests/unit/gapic/firestore_admin_v1/__init__.py",
"tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py",
"tests/unit/gapic/firestore_v1/__init__.py",
diff --git a/synth.py b/synth.py
index 169eb7c7de..b4fa231531 100644
--- a/synth.py
+++ b/synth.py
@@ -66,6 +66,23 @@
s.move(library / "scripts")
+# ----------------------------------------------------------------------------
+# Generate firestore bundle GAPIC layer
+# ----------------------------------------------------------------------------
+for version in ["v1"]:
+ library = gapic.py_library(
+ service="firestore-bundle",
+ version=version,
+ proto_path='google/firestore/bundle',
+ bazel_target=f"//google/firestore/bundle:firestore-bundle-py",
+ )
+ s.move(
+ library / f"google/cloud/bundle",
+ f"google/cloud/firestore_bundle",
+ )
+ s.move(library / f"tests", f"tests")
+
+
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
@@ -105,6 +122,19 @@
""",
)
+s.replace(
+ ".gitignore",
+ """\
+pylintrc
+pylintrc.test
+""",
+ """\
+pylintrc
+pylintrc.test
+.make/**
+""",
+)
+
s.replace(
"setup.cfg",
"""\
@@ -191,6 +221,18 @@ def lint_setup_py(session):
""",
)
+s.replace(
+ "google/cloud/firestore_bundle/types/bundle.py",
+ "from google.firestore.v1 import document_pb2 as gfv_document # type: ignore\n",
+ "from google.cloud.firestore_v1.types import document as gfv_document\n",
+)
+
+s.replace(
+ "google/cloud/firestore_bundle/types/bundle.py",
+ "from google.firestore.v1 import query_pb2 as query # type: ignore\n",
+ "from google.cloud.firestore_v1.types import query\n",
+)
+
s.replace(
".coveragerc",
"""\
@@ -205,6 +247,18 @@ def lint_setup_py(session):
""",
)
+s.replace(
+ "google/cloud/firestore_bundle/__init__.py",
+ "from .types.bundle import NamedQuery\n",
+ "from .types.bundle import NamedQuery\n\nfrom .bundle import FirestoreBundle\n",
+)
+
+s.replace(
+ "google/cloud/firestore_bundle/__init__.py",
+ "\'BundledQuery\',",
+ "\"BundledQuery\",\n \"FirestoreBundle\",",
+)
+
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
s.replace(
@@ -216,3 +270,22 @@ def lint_setup_py(session):
# Setup service account credentials.""",
)
+
+
+# Add a section on updating conformance tests to contributing.
+s.replace(
+ "CONTRIBUTING.rst",
+ "\nTest Coverage",
+ """*************
+Updating Conformance Tests
+**************************
+
+The firestore client libraries use a shared set of conformance tests, the source of which can be found at https://github.com/googleapis/conformance-tests.
+
+To update the copy of these conformance tests used by this repository, run the provided Makefile:
+
+ $ make -f Makefile_v1
+
+*************
+Test Coverage"""
+)
diff --git a/tests/system/test_system.py b/tests/system/test_system.py
index 355c5aebb8..6d4471461c 100644
--- a/tests/system/test_system.py
+++ b/tests/system/test_system.py
@@ -1186,6 +1186,32 @@ def on_snapshot(docs, changes, read_time):
)
+def test_array_union(client, cleanup):
+ doc_ref = client.document("gcp-7523", "test-document")
+ cleanup(doc_ref.delete)
+ doc_ref.delete()
+ tree_1 = {"forest": {"tree-1": "oak"}}
+ tree_2 = {"forest": {"tree-2": "pine"}}
+ tree_3 = {"forest": {"tree-3": firestore.ArrayUnion(["spruce"])}}
+
+ doc_ref.set(tree_1)
+ expected = tree_1.copy()
+ assert doc_ref.get().to_dict() == expected
+
+ doc_ref.set(tree_2, merge=True)
+ expected["forest"]["tree-2"] = tree_2["forest"]["tree-2"]
+ assert doc_ref.get().to_dict() == expected
+
+ doc_ref.set(tree_3, merge=True)
+ expected["forest"]["tree-3"] = ["spruce"]
+ assert doc_ref.get().to_dict() == expected
+
+ tree_3_part_2 = {"forest": {"tree-3": firestore.ArrayUnion(["palm"])}}
+ expected["forest"]["tree-3"].append("palm")
+ doc_ref.set(tree_3_part_2, merge=True)
+ assert doc_ref.get().to_dict() == expected
+
+
def test_watch_query_order(client, cleanup):
db = client
collection_ref = db.collection("users")
diff --git a/tests/unit/gapic/bundle/__init__.py b/tests/unit/gapic/bundle/__init__.py
new file mode 100644
index 0000000000..42ffdf2bc4
--- /dev/null
+++ b/tests/unit/gapic/bundle/__init__.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/firestore_admin_v1/__init__.py b/tests/unit/gapic/firestore_admin_v1/__init__.py
index 8b13789179..42ffdf2bc4 100644
--- a/tests/unit/gapic/firestore_admin_v1/__init__.py
+++ b/tests/unit/gapic/firestore_admin_v1/__init__.py
@@ -1 +1,16 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py
index 093662c492..b7d6e48dd1 100644
--- a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py
+++ b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py
@@ -99,7 +99,24 @@ def test__get_default_mtls_endpoint():
@pytest.mark.parametrize(
- "client_class", [FirestoreAdminClient, FirestoreAdminAsyncClient]
+ "client_class", [FirestoreAdminClient, FirestoreAdminAsyncClient,]
+)
+def test_firestore_admin_client_from_service_account_info(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "firestore.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "client_class", [FirestoreAdminClient, FirestoreAdminAsyncClient,]
)
def test_firestore_admin_client_from_service_account_file(client_class):
creds = credentials.AnonymousCredentials()
@@ -109,16 +126,21 @@ def test_firestore_admin_client_from_service_account_file(client_class):
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "firestore.googleapis.com:443"
def test_firestore_admin_client_get_transport_class():
transport = FirestoreAdminClient.get_transport_class()
- assert transport == transports.FirestoreAdminGrpcTransport
+ available_transports = [
+ transports.FirestoreAdminGrpcTransport,
+ ]
+ assert transport in available_transports
transport = FirestoreAdminClient.get_transport_class("grpc")
assert transport == transports.FirestoreAdminGrpcTransport
@@ -169,7 +191,7 @@ def test_firestore_admin_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -185,7 +207,7 @@ def test_firestore_admin_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -201,7 +223,7 @@ def test_firestore_admin_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -229,7 +251,7 @@ def test_firestore_admin_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -280,29 +302,25 @@ def test_firestore_admin_client_mtls_env_auto(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- ssl_channel_creds = mock.Mock()
- with mock.patch(
- "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
- ):
- patched.return_value = None
- client = client_class(client_options=options)
+ patched.return_value = None
+ client = client_class(client_options=options)
- if use_client_cert_env == "false":
- expected_ssl_channel_creds = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_ssl_channel_creds = ssl_channel_creds
- expected_host = client.DEFAULT_MTLS_ENDPOINT
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
@@ -311,66 +329,53 @@ def test_firestore_admin_client_mtls_env_auto(
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
):
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.ssl_credentials",
- new_callable=mock.PropertyMock,
- ) as ssl_credentials_mock:
- if use_client_cert_env == "false":
- is_mtls_mock.return_value = False
- ssl_credentials_mock.return_value = None
- expected_host = client.DEFAULT_ENDPOINT
- expected_ssl_channel_creds = None
- else:
- is_mtls_mock.return_value = True
- ssl_credentials_mock.return_value = mock.Mock()
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_ssl_channel_creds = (
- ssl_credentials_mock.return_value
- )
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
- ):
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=expected_host,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -396,7 +401,7 @@ def test_firestore_admin_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -426,7 +431,7 @@ def test_firestore_admin_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -445,7 +450,7 @@ def test_firestore_admin_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -483,6 +488,22 @@ def test_create_index_from_dict():
test_create_index(request_type=dict)
+def test_create_index_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_index), "__call__") as call:
+ client.create_index()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.CreateIndexRequest()
+
+
@pytest.mark.asyncio
async def test_create_index_async(
transport: str = "grpc_asyncio", request_type=firestore_admin.CreateIndexRequest
@@ -686,6 +707,22 @@ def test_list_indexes_from_dict():
test_list_indexes(request_type=dict)
+def test_list_indexes_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_indexes), "__call__") as call:
+ client.list_indexes()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.ListIndexesRequest()
+
+
@pytest.mark.asyncio
async def test_list_indexes_async(
transport: str = "grpc_asyncio", request_type=firestore_admin.ListIndexesRequest
@@ -1009,6 +1046,22 @@ def test_get_index_from_dict():
test_get_index(request_type=dict)
+def test_get_index_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_index), "__call__") as call:
+ client.get_index()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.GetIndexRequest()
+
+
@pytest.mark.asyncio
async def test_get_index_async(
transport: str = "grpc_asyncio", request_type=firestore_admin.GetIndexRequest
@@ -1201,6 +1254,22 @@ def test_delete_index_from_dict():
test_delete_index(request_type=dict)
+def test_delete_index_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_index), "__call__") as call:
+ client.delete_index()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.DeleteIndexRequest()
+
+
@pytest.mark.asyncio
async def test_delete_index_async(
transport: str = "grpc_asyncio", request_type=firestore_admin.DeleteIndexRequest
@@ -1384,6 +1453,22 @@ def test_get_field_from_dict():
test_get_field(request_type=dict)
+def test_get_field_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_field), "__call__") as call:
+ client.get_field()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.GetFieldRequest()
+
+
@pytest.mark.asyncio
async def test_get_field_async(
transport: str = "grpc_asyncio", request_type=firestore_admin.GetFieldRequest
@@ -1568,6 +1653,22 @@ def test_update_field_from_dict():
test_update_field(request_type=dict)
+def test_update_field_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_field), "__call__") as call:
+ client.update_field()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.UpdateFieldRequest()
+
+
@pytest.mark.asyncio
async def test_update_field_async(
transport: str = "grpc_asyncio", request_type=firestore_admin.UpdateFieldRequest
@@ -1761,6 +1862,22 @@ def test_list_fields_from_dict():
test_list_fields(request_type=dict)
+def test_list_fields_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_fields), "__call__") as call:
+ client.list_fields()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.ListFieldsRequest()
+
+
@pytest.mark.asyncio
async def test_list_fields_async(
transport: str = "grpc_asyncio", request_type=firestore_admin.ListFieldsRequest
@@ -2063,6 +2180,22 @@ def test_export_documents_from_dict():
test_export_documents(request_type=dict)
+def test_export_documents_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.export_documents), "__call__") as call:
+ client.export_documents()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.ExportDocumentsRequest()
+
+
@pytest.mark.asyncio
async def test_export_documents_async(
transport: str = "grpc_asyncio", request_type=firestore_admin.ExportDocumentsRequest
@@ -2249,6 +2382,22 @@ def test_import_documents_from_dict():
test_import_documents(request_type=dict)
+def test_import_documents_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreAdminClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.import_documents), "__call__") as call:
+ client.import_documents()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore_admin.ImportDocumentsRequest()
+
+
@pytest.mark.asyncio
async def test_import_documents_async(
transport: str = "grpc_asyncio", request_type=firestore_admin.ImportDocumentsRequest
@@ -2584,6 +2733,54 @@ def test_firestore_admin_transport_auth_adc():
)
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.FirestoreAdminGrpcTransport,
+ transports.FirestoreAdminGrpcAsyncIOTransport,
+ ],
+)
+def test_firestore_admin_grpc_transport_client_cert_source_for_mtls(transport_class):
+ cred = credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ ),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+ # is used.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
def test_firestore_admin_host_no_port():
client = FirestoreAdminClient(
credentials=credentials.AnonymousCredentials(),
@@ -2605,7 +2802,7 @@ def test_firestore_admin_host_with_port():
def test_firestore_admin_grpc_transport_channel():
- channel = grpc.insecure_channel("http://localhost/")
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.FirestoreAdminGrpcTransport(
@@ -2617,7 +2814,7 @@ def test_firestore_admin_grpc_transport_channel():
def test_firestore_admin_grpc_asyncio_transport_channel():
- channel = aio.insecure_channel("http://localhost/")
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.FirestoreAdminGrpcAsyncIOTransport(
@@ -2628,6 +2825,8 @@ def test_firestore_admin_grpc_asyncio_transport_channel():
assert transport._ssl_channel_credentials == None
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2642,7 +2841,7 @@ def test_firestore_admin_transport_channel_mtls_with_client_cert_source(
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
@@ -2674,11 +2873,17 @@ def test_firestore_admin_transport_channel_mtls_with_client_cert_source(
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
)
assert transport.grpc_channel == mock_grpc_channel
assert transport._ssl_channel_credentials == mock_ssl_cred
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2694,7 +2899,7 @@ def test_firestore_admin_transport_channel_mtls_with_adc(transport_class):
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
@@ -2718,6 +2923,10 @@ def test_firestore_admin_transport_channel_mtls_with_adc(transport_class):
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
)
assert transport.grpc_channel == mock_grpc_channel
diff --git a/tests/unit/gapic/firestore_v1/__init__.py b/tests/unit/gapic/firestore_v1/__init__.py
index 8b13789179..42ffdf2bc4 100644
--- a/tests/unit/gapic/firestore_v1/__init__.py
+++ b/tests/unit/gapic/firestore_v1/__init__.py
@@ -1 +1,16 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/firestore_v1/test_firestore.py b/tests/unit/gapic/firestore_v1/test_firestore.py
index 13891e6022..2ff7e01f1c 100644
--- a/tests/unit/gapic/firestore_v1/test_firestore.py
+++ b/tests/unit/gapic/firestore_v1/test_firestore.py
@@ -89,7 +89,22 @@ def test__get_default_mtls_endpoint():
assert FirestoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
-@pytest.mark.parametrize("client_class", [FirestoreClient, FirestoreAsyncClient])
+@pytest.mark.parametrize("client_class", [FirestoreClient, FirestoreAsyncClient,])
+def test_firestore_client_from_service_account_info(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "firestore.googleapis.com:443"
+
+
+@pytest.mark.parametrize("client_class", [FirestoreClient, FirestoreAsyncClient,])
def test_firestore_client_from_service_account_file(client_class):
creds = credentials.AnonymousCredentials()
with mock.patch.object(
@@ -98,16 +113,21 @@ def test_firestore_client_from_service_account_file(client_class):
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "firestore.googleapis.com:443"
def test_firestore_client_get_transport_class():
transport = FirestoreClient.get_transport_class()
- assert transport == transports.FirestoreGrpcTransport
+ available_transports = [
+ transports.FirestoreGrpcTransport,
+ ]
+ assert transport in available_transports
transport = FirestoreClient.get_transport_class("grpc")
assert transport == transports.FirestoreGrpcTransport
@@ -154,7 +174,7 @@ def test_firestore_client_client_options(client_class, transport_class, transpor
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -170,7 +190,7 @@ def test_firestore_client_client_options(client_class, transport_class, transpor
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -186,7 +206,7 @@ def test_firestore_client_client_options(client_class, transport_class, transpor
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -214,7 +234,7 @@ def test_firestore_client_client_options(client_class, transport_class, transpor
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -263,29 +283,25 @@ def test_firestore_client_mtls_env_auto(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- ssl_channel_creds = mock.Mock()
- with mock.patch(
- "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
- ):
- patched.return_value = None
- client = client_class(client_options=options)
+ patched.return_value = None
+ client = client_class(client_options=options)
- if use_client_cert_env == "false":
- expected_ssl_channel_creds = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_ssl_channel_creds = ssl_channel_creds
- expected_host = client.DEFAULT_MTLS_ENDPOINT
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
@@ -294,66 +310,53 @@ def test_firestore_client_mtls_env_auto(
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
):
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.ssl_credentials",
- new_callable=mock.PropertyMock,
- ) as ssl_credentials_mock:
- if use_client_cert_env == "false":
- is_mtls_mock.return_value = False
- ssl_credentials_mock.return_value = None
- expected_host = client.DEFAULT_ENDPOINT
- expected_ssl_channel_creds = None
- else:
- is_mtls_mock.return_value = True
- ssl_credentials_mock.return_value = mock.Mock()
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_ssl_channel_creds = (
- ssl_credentials_mock.return_value
- )
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
- ):
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=expected_host,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -379,7 +382,7 @@ def test_firestore_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -409,7 +412,7 @@ def test_firestore_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -426,7 +429,7 @@ def test_firestore_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -467,6 +470,22 @@ def test_get_document_from_dict():
test_get_document(request_type=dict)
+def test_get_document_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_document), "__call__") as call:
+ client.get_document()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.GetDocumentRequest()
+
+
@pytest.mark.asyncio
async def test_get_document_async(
transport: str = "grpc_asyncio", request_type=firestore.GetDocumentRequest
@@ -591,6 +610,22 @@ def test_list_documents_from_dict():
test_list_documents(request_type=dict)
+def test_list_documents_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_documents), "__call__") as call:
+ client.list_documents()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.ListDocumentsRequest()
+
+
@pytest.mark.asyncio
async def test_list_documents_async(
transport: str = "grpc_asyncio", request_type=firestore.ListDocumentsRequest
@@ -853,6 +888,22 @@ def test_update_document_from_dict():
test_update_document(request_type=dict)
+def test_update_document_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_document), "__call__") as call:
+ client.update_document()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.UpdateDocumentRequest()
+
+
@pytest.mark.asyncio
async def test_update_document_async(
transport: str = "grpc_asyncio", request_type=firestore.UpdateDocumentRequest
@@ -1063,6 +1114,22 @@ def test_delete_document_from_dict():
test_delete_document(request_type=dict)
+def test_delete_document_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_document), "__call__") as call:
+ client.delete_document()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.DeleteDocumentRequest()
+
+
@pytest.mark.asyncio
async def test_delete_document_async(
transport: str = "grpc_asyncio", request_type=firestore.DeleteDocumentRequest
@@ -1246,6 +1313,24 @@ def test_batch_get_documents_from_dict():
test_batch_get_documents(request_type=dict)
+def test_batch_get_documents_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_get_documents), "__call__"
+ ) as call:
+ client.batch_get_documents()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.BatchGetDocumentsRequest()
+
+
@pytest.mark.asyncio
async def test_batch_get_documents_async(
transport: str = "grpc_asyncio", request_type=firestore.BatchGetDocumentsRequest
@@ -1381,6 +1466,24 @@ def test_begin_transaction_from_dict():
test_begin_transaction(request_type=dict)
+def test_begin_transaction_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.begin_transaction), "__call__"
+ ) as call:
+ client.begin_transaction()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.BeginTransactionRequest()
+
+
@pytest.mark.asyncio
async def test_begin_transaction_async(
transport: str = "grpc_asyncio", request_type=firestore.BeginTransactionRequest
@@ -1578,6 +1681,22 @@ def test_commit_from_dict():
test_commit(request_type=dict)
+def test_commit_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.commit), "__call__") as call:
+ client.commit()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.CommitRequest()
+
+
@pytest.mark.asyncio
async def test_commit_async(
transport: str = "grpc_asyncio", request_type=firestore.CommitRequest
@@ -1780,6 +1899,22 @@ def test_rollback_from_dict():
test_rollback(request_type=dict)
+def test_rollback_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.rollback), "__call__") as call:
+ client.rollback()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.RollbackRequest()
+
+
@pytest.mark.asyncio
async def test_rollback_async(
transport: str = "grpc_asyncio", request_type=firestore.RollbackRequest
@@ -1971,6 +2106,22 @@ def test_run_query_from_dict():
test_run_query(request_type=dict)
+def test_run_query_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.run_query), "__call__") as call:
+ client.run_query()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.RunQueryRequest()
+
+
@pytest.mark.asyncio
async def test_run_query_async(
transport: str = "grpc_asyncio", request_type=firestore.RunQueryRequest
@@ -2098,6 +2249,22 @@ def test_partition_query_from_dict():
test_partition_query(request_type=dict)
+def test_partition_query_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.partition_query), "__call__") as call:
+ client.partition_query()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.PartitionQueryRequest()
+
+
@pytest.mark.asyncio
async def test_partition_query_async(
transport: str = "grpc_asyncio", request_type=firestore.PartitionQueryRequest
@@ -2495,6 +2662,24 @@ def test_list_collection_ids_from_dict():
test_list_collection_ids(request_type=dict)
+def test_list_collection_ids_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_collection_ids), "__call__"
+ ) as call:
+ client.list_collection_ids()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.ListCollectionIdsRequest()
+
+
@pytest.mark.asyncio
async def test_list_collection_ids_async(
transport: str = "grpc_asyncio", request_type=firestore.ListCollectionIdsRequest
@@ -2823,6 +3008,22 @@ def test_batch_write_from_dict():
test_batch_write(request_type=dict)
+def test_batch_write_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.batch_write), "__call__") as call:
+ client.batch_write()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.BatchWriteRequest()
+
+
@pytest.mark.asyncio
async def test_batch_write_async(
transport: str = "grpc_asyncio", request_type=firestore.BatchWriteRequest
@@ -2945,6 +3146,22 @@ def test_create_document_from_dict():
test_create_document(request_type=dict)
+def test_create_document_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = FirestoreClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_document), "__call__") as call:
+ client.create_document()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == firestore.CreateDocumentRequest()
+
+
@pytest.mark.asyncio
async def test_create_document_async(
transport: str = "grpc_asyncio", request_type=firestore.CreateDocumentRequest
@@ -3088,7 +3305,7 @@ def test_transport_get_channel():
@pytest.mark.parametrize(
"transport_class",
- [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport],
+ [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport,],
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
@@ -3211,6 +3428,51 @@ def test_firestore_transport_auth_adc():
)
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport],
+)
+def test_firestore_grpc_transport_client_cert_source_for_mtls(transport_class):
+ cred = credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/datastore",
+ ),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+ # is used.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
def test_firestore_host_no_port():
client = FirestoreClient(
credentials=credentials.AnonymousCredentials(),
@@ -3232,7 +3494,7 @@ def test_firestore_host_with_port():
def test_firestore_grpc_transport_channel():
- channel = grpc.insecure_channel("http://localhost/")
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.FirestoreGrpcTransport(
@@ -3244,7 +3506,7 @@ def test_firestore_grpc_transport_channel():
def test_firestore_grpc_asyncio_transport_channel():
- channel = aio.insecure_channel("http://localhost/")
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.FirestoreGrpcAsyncIOTransport(
@@ -3255,6 +3517,8 @@ def test_firestore_grpc_asyncio_transport_channel():
assert transport._ssl_channel_credentials == None
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport],
@@ -3264,7 +3528,7 @@ def test_firestore_transport_channel_mtls_with_client_cert_source(transport_clas
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
@@ -3296,11 +3560,17 @@ def test_firestore_transport_channel_mtls_with_client_cert_source(transport_clas
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
)
assert transport.grpc_channel == mock_grpc_channel
assert transport._ssl_channel_credentials == mock_ssl_cred
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[transports.FirestoreGrpcTransport, transports.FirestoreGrpcAsyncIOTransport],
@@ -3313,7 +3583,7 @@ def test_firestore_transport_channel_mtls_with_adc(transport_class):
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
@@ -3337,6 +3607,10 @@ def test_firestore_transport_channel_mtls_with_adc(transport_class):
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
)
assert transport.grpc_channel == mock_grpc_channel
diff --git a/tests/unit/v1/_test_helpers.py b/tests/unit/v1/_test_helpers.py
new file mode 100644
index 0000000000..65aece0d4d
--- /dev/null
+++ b/tests/unit/v1/_test_helpers.py
@@ -0,0 +1,84 @@
+# Copyright 2021 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import mock
+import typing
+
+import google
+from google.cloud.firestore_v1.base_client import BaseClient
+from google.cloud.firestore_v1.document import DocumentReference, DocumentSnapshot
+from google.cloud._helpers import _datetime_to_pb_timestamp, UTC # type: ignore
+from google.cloud.firestore_v1._helpers import build_timestamp
+from google.cloud.firestore_v1.async_client import AsyncClient
+from google.cloud.firestore_v1.client import Client
+from google.protobuf.timestamp_pb2 import Timestamp # type: ignore
+
+
+def make_test_credentials() -> google.auth.credentials.Credentials: # type: ignore
+ import google.auth.credentials # type: ignore
+
+ return mock.Mock(spec=google.auth.credentials.Credentials)
+
+
+def make_client(project_name: typing.Optional[str] = None) -> Client:
+ return Client(
+ project=project_name or "project-project", credentials=make_test_credentials(),
+ )
+
+
+def make_async_client() -> AsyncClient:
+ return AsyncClient(project="project-project", credentials=make_test_credentials())
+
+
+def build_test_timestamp(
+ year: int = 2021,
+ month: int = 1,
+ day: int = 1,
+ hour: int = 12,
+ minute: int = 0,
+ second: int = 0,
+) -> Timestamp:
+ return _datetime_to_pb_timestamp(
+ datetime.datetime(
+ year=year,
+ month=month,
+ day=day,
+ hour=hour,
+ minute=minute,
+ second=second,
+ tzinfo=UTC,
+ ),
+ )
+
+
+def build_document_snapshot(
+ *,
+ collection_name: str = "col",
+ document_id: str = "doc",
+ client: typing.Optional[BaseClient] = None,
+ data: typing.Optional[typing.Dict] = None,
+ exists: bool = True,
+ create_time: typing.Optional[Timestamp] = None,
+ read_time: typing.Optional[Timestamp] = None,
+ update_time: typing.Optional[Timestamp] = None,
+) -> DocumentSnapshot:
+ return DocumentSnapshot(
+ DocumentReference(collection_name, document_id, client=client),
+ data or {"hello", "world"},
+ exists=exists,
+ read_time=read_time or build_timestamp(),
+ create_time=create_time or build_timestamp(),
+ update_time=update_time or build_timestamp(),
+ )
diff --git a/tests/unit/v1/conformance_tests.py b/tests/unit/v1/conformance_tests.py
index 0718f8e5f4..9254395c05 100644
--- a/tests/unit/v1/conformance_tests.py
+++ b/tests/unit/v1/conformance_tests.py
@@ -56,7 +56,7 @@ class TestFile(proto.Message):
r"""A collection of tests.
Attributes:
- tests (Sequence[~.gcf_tests.Test]):
+ tests (Sequence[google.cloud.firestore_v1.types.Test]):
"""
@@ -73,21 +73,21 @@ class Test(proto.Message):
comment (str):
a comment describing the behavior being
tested
- get (~.gcf_tests.GetTest):
+ get (google.cloud.firestore_v1.types.GetTest):
- create (~.gcf_tests.CreateTest):
+ create (google.cloud.firestore_v1.types.CreateTest):
- set_ (~.gcf_tests.SetTest):
+ set_ (google.cloud.firestore_v1.types.SetTest):
- update (~.gcf_tests.UpdateTest):
+ update (google.cloud.firestore_v1.types.UpdateTest):
- update_paths (~.gcf_tests.UpdatePathsTest):
+ update_paths (google.cloud.firestore_v1.types.UpdatePathsTest):
- delete (~.gcf_tests.DeleteTest):
+ delete (google.cloud.firestore_v1.types.DeleteTest):
- query (~.gcf_tests.QueryTest):
+ query (google.cloud.firestore_v1.types.QueryTest):
- listen (~.gcf_tests.ListenTest):
+ listen (google.cloud.firestore_v1.types.ListenTest):
"""
@@ -121,7 +121,7 @@ class GetTest(proto.Message):
doc_ref_path (str):
The path of the doc, e.g.
"projects/projectID/databases/(default)/documents/C/d".
- request (~.firestore.GetDocumentRequest):
+ request (google.cloud.firestore_v1.types.GetDocumentRequest):
The request that the call should send to the
Firestore service.
"""
@@ -146,7 +146,7 @@ class CreateTest(proto.Message):
the two special sentinel values. Values that
could be interpreted as integers (i.e. digit
strings) should be treated as integers.
- request (~.firestore.CommitRequest):
+ request (google.cloud.firestore_v1.types.CommitRequest):
The request that the call should generate.
is_error (bool):
If true, the call should result in an error
@@ -169,11 +169,11 @@ class SetTest(proto.Message):
Attributes:
doc_ref_path (str):
path of doc
- option (~.gcf_tests.SetOption):
+ option (google.cloud.firestore_v1.types.SetOption):
option to the Set call, if any
json_data (str):
data (see CreateTest.json_data)
- request (~.firestore.CommitRequest):
+ request (google.cloud.firestore_v1.types.CommitRequest):
expected request
is_error (bool):
call signals an error
@@ -197,11 +197,11 @@ class UpdateTest(proto.Message):
Attributes:
doc_ref_path (str):
path of doc
- precondition (~.common.Precondition):
+ precondition (google.cloud.firestore_v1.types.Precondition):
precondition in call, if any
json_data (str):
data (see CreateTest.json_data)
- request (~.firestore.CommitRequest):
+ request (google.cloud.firestore_v1.types.CommitRequest):
expected request
is_error (bool):
call signals an error
@@ -225,14 +225,14 @@ class UpdatePathsTest(proto.Message):
Attributes:
doc_ref_path (str):
path of doc
- precondition (~.common.Precondition):
+ precondition (google.cloud.firestore_v1.types.Precondition):
precondition in call, if any
- field_paths (Sequence[~.gcf_tests.FieldPath]):
+ field_paths (Sequence[google.cloud.firestore_v1.types.FieldPath]):
parallel sequences: field_paths[i] corresponds to
json_values[i]
json_values (Sequence[str]):
the argument values, as JSON
- request (~.firestore.CommitRequest):
+ request (google.cloud.firestore_v1.types.CommitRequest):
expected rquest
is_error (bool):
call signals an error
@@ -257,9 +257,9 @@ class DeleteTest(proto.Message):
Attributes:
doc_ref_path (str):
path of doc
- precondition (~.common.Precondition):
+ precondition (google.cloud.firestore_v1.types.Precondition):
- request (~.firestore.CommitRequest):
+ request (google.cloud.firestore_v1.types.CommitRequest):
expected rquest
is_error (bool):
call signals an error
@@ -281,7 +281,7 @@ class SetOption(proto.Message):
all_ (bool):
if true, merge all fields ("fields" is
ignored).
- fields (Sequence[~.gcf_tests.FieldPath]):
+ fields (Sequence[google.cloud.firestore_v1.types.FieldPath]):
field paths for a Merge option
"""
@@ -297,9 +297,9 @@ class QueryTest(proto.Message):
coll_path (str):
path of collection, e.g.
"projects/projectID/databases/(default)/documents/C".
- clauses (Sequence[~.gcf_tests.Clause]):
+ clauses (Sequence[google.cloud.firestore_v1.types.Clause]):
- query (~.gcf_query.StructuredQuery):
+ query (google.cloud.firestore_v1.types.StructuredQuery):
is_error (bool):
@@ -318,23 +318,23 @@ class Clause(proto.Message):
r"""
Attributes:
- select (~.gcf_tests.Select):
+ select (google.cloud.firestore_v1.types.Select):
- where (~.gcf_tests.Where):
+ where (google.cloud.firestore_v1.types.Where):
- order_by (~.gcf_tests.OrderBy):
+ order_by (google.cloud.firestore_v1.types.OrderBy):
offset (int):
limit (int):
- start_at (~.gcf_tests.Cursor_):
+ start_at (google.cloud.firestore_v1.types.Cursor_):
- start_after (~.gcf_tests.Cursor_):
+ start_after (google.cloud.firestore_v1.types.Cursor_):
- end_at (~.gcf_tests.Cursor_):
+ end_at (google.cloud.firestore_v1.types.Cursor_):
- end_before (~.gcf_tests.Cursor_):
+ end_before (google.cloud.firestore_v1.types.Cursor_):
"""
@@ -365,7 +365,7 @@ class Select(proto.Message):
r"""
Attributes:
- fields (Sequence[~.gcf_tests.FieldPath]):
+ fields (Sequence[google.cloud.firestore_v1.types.FieldPath]):
"""
@@ -376,7 +376,7 @@ class Where(proto.Message):
r"""
Attributes:
- path (~.gcf_tests.FieldPath):
+ path (google.cloud.firestore_v1.types.FieldPath):
op (str):
@@ -395,7 +395,7 @@ class OrderBy(proto.Message):
r"""
Attributes:
- path (~.gcf_tests.FieldPath):
+ path (google.cloud.firestore_v1.types.FieldPath):
direction (str):
"asc" or "desc".
@@ -410,7 +410,7 @@ class Cursor_(proto.Message):
r"""
Attributes:
- doc_snapshot (~.gcf_tests.DocSnapshot):
+ doc_snapshot (google.cloud.firestore_v1.types.DocSnapshot):
one of:
json_values (Sequence[str]):
@@ -462,9 +462,9 @@ class ListenTest(proto.Message):
in the tests before running them.
Attributes:
- responses (Sequence[~.firestore.ListenResponse]):
+ responses (Sequence[google.cloud.firestore_v1.types.ListenResponse]):
- snapshots (Sequence[~.gcf_tests.Snapshot]):
+ snapshots (Sequence[google.cloud.firestore_v1.types.Snapshot]):
is_error (bool):
@@ -483,11 +483,11 @@ class Snapshot(proto.Message):
r"""
Attributes:
- docs (Sequence[~.document.Document]):
+ docs (Sequence[google.cloud.firestore_v1.types.Document]):
- changes (Sequence[~.gcf_tests.DocChange]):
+ changes (Sequence[google.cloud.firestore_v1.types.DocChange]):
- read_time (~.timestamp.Timestamp):
+ read_time (google.protobuf.timestamp_pb2.Timestamp):
"""
@@ -502,9 +502,9 @@ class DocChange(proto.Message):
r"""
Attributes:
- kind (~.gcf_tests.DocChange.Kind):
+ kind (google.cloud.firestore_v1.types.DocChange.Kind):
- doc (~.document.Document):
+ doc (google.cloud.firestore_v1.types.Document):
old_index (int):
diff --git a/tests/unit/v1/test__helpers.py b/tests/unit/v1/test__helpers.py
index 5c4c459dbb..f558f3fe96 100644
--- a/tests/unit/v1/test__helpers.py
+++ b/tests/unit/v1/test__helpers.py
@@ -13,11 +13,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import aiounittest
import datetime
import sys
import unittest
import mock
+import pytest
+from typing import List
class AsyncMock(mock.MagicMock):
@@ -26,10 +29,14 @@ async def __call__(self, *args, **kwargs):
class AsyncIter:
+ """Utility to help recreate the effect of an async generator. Useful when
+ you need to mock a system that requires `async for`.
+ """
+
def __init__(self, items):
self.items = items
- async def __aiter__(self, **_):
+ async def __aiter__(self):
for i in self.items:
yield i
@@ -381,6 +388,74 @@ def test_different_client(self):
self.assertEqual(exc_info.exception.args, (err_msg,))
+class TestDocumentReferenceValue(unittest.TestCase):
+ @staticmethod
+ def _call(ref_value: str):
+ from google.cloud.firestore_v1._helpers import DocumentReferenceValue
+
+ return DocumentReferenceValue(ref_value)
+
+ def test_normal(self):
+ orig = "projects/name/databases/(default)/documents/col/doc"
+ parsed = self._call(orig)
+ self.assertEqual(parsed.collection_name, "col")
+ self.assertEqual(parsed.database_name, "(default)")
+ self.assertEqual(parsed.document_id, "doc")
+
+ self.assertEqual(parsed.full_path, orig)
+ parsed._reference_value = None # type: ignore
+ self.assertEqual(parsed.full_path, orig)
+
+ def test_nested(self):
+ parsed = self._call(
+ "projects/name/databases/(default)/documents/col/doc/nested"
+ )
+ self.assertEqual(parsed.collection_name, "col")
+ self.assertEqual(parsed.database_name, "(default)")
+ self.assertEqual(parsed.document_id, "doc/nested")
+
+ def test_broken(self):
+ self.assertRaises(
+ ValueError, self._call, "projects/name/databases/(default)/documents/col",
+ )
+
+
+class Test_document_snapshot_to_protobuf(unittest.TestCase):
+ def test_real_snapshot(self):
+ from google.cloud.firestore_v1._helpers import document_snapshot_to_protobuf
+ from google.cloud.firestore_v1.types import Document
+ from google.cloud.firestore_v1.base_document import DocumentSnapshot
+ from google.cloud.firestore_v1.document import DocumentReference
+ from google.protobuf import timestamp_pb2 # type: ignore
+
+ client = _make_client()
+ snapshot = DocumentSnapshot(
+ data={"hello": "world"},
+ reference=DocumentReference("col", "doc", client=client),
+ exists=True,
+ read_time=timestamp_pb2.Timestamp(seconds=0, nanos=1),
+ update_time=timestamp_pb2.Timestamp(seconds=0, nanos=1),
+ create_time=timestamp_pb2.Timestamp(seconds=0, nanos=1),
+ )
+ self.assertIsInstance(document_snapshot_to_protobuf(snapshot), Document)
+
+    def test_non_existent_snapshot(self):
+ from google.cloud.firestore_v1._helpers import document_snapshot_to_protobuf
+ from google.cloud.firestore_v1.base_document import DocumentSnapshot
+ from google.cloud.firestore_v1.document import DocumentReference
+
+ client = _make_client()
+ snapshot = DocumentSnapshot(
+ data=None,
+ reference=DocumentReference("col", "doc", client=client),
+ exists=False,
+ read_time=None,
+ update_time=None,
+ create_time=None,
+ )
+ self.assertIsNone(document_snapshot_to_protobuf(snapshot))
+
+
class Test_decode_value(unittest.TestCase):
@staticmethod
def _call_fut(value, client=mock.sentinel.client):
@@ -2424,6 +2499,15 @@ def test_retry_and_timeout(self):
self.assertEqual(kwargs, expected)
+class TestAsyncGenerator(aiounittest.AsyncTestCase):
+ @pytest.mark.asyncio
+ async def test_async_iter(self):
+ consumed: List[int] = []
+ async for el in AsyncIter([1, 2, 3]):
+ consumed.append(el)
+ self.assertEqual(consumed, [1, 2, 3])
+
+
def _value_pb(**kwargs):
from google.cloud.firestore_v1.types.document import Value
diff --git a/tests/unit/v1/test_async_client.py b/tests/unit/v1/test_async_client.py
index 44d81d0583..b766c22fcf 100644
--- a/tests/unit/v1/test_async_client.py
+++ b/tests/unit/v1/test_async_client.py
@@ -18,7 +18,7 @@
import aiounittest
import mock
-from tests.unit.v1.test__helpers import AsyncMock, AsyncIter
+from tests.unit.v1.test__helpers import AsyncIter, AsyncMock
class TestAsyncClient(aiounittest.AsyncTestCase):
diff --git a/tests/unit/v1/test_async_collection.py b/tests/unit/v1/test_async_collection.py
index 4a2f30de10..a7b3ba0e4f 100644
--- a/tests/unit/v1/test_async_collection.py
+++ b/tests/unit/v1/test_async_collection.py
@@ -17,7 +17,7 @@
import aiounittest
import mock
-from tests.unit.v1.test__helpers import AsyncMock, AsyncIter
+from tests.unit.v1.test__helpers import AsyncIter, AsyncMock
class TestAsyncCollectionReference(aiounittest.AsyncTestCase):
@@ -252,6 +252,7 @@ async def _next_page(self):
"collection_id": collection.id,
"page_size": page_size,
"show_missing": True,
+ "mask": {"field_paths": None},
},
metadata=client._rpc_metadata,
**kwargs,
diff --git a/tests/unit/v1/test_async_document.py b/tests/unit/v1/test_async_document.py
index 606652646e..701ef5a59d 100644
--- a/tests/unit/v1/test_async_document.py
+++ b/tests/unit/v1/test_async_document.py
@@ -17,7 +17,7 @@
import aiounittest
import mock
-from tests.unit.v1.test__helpers import AsyncMock
+from tests.unit.v1.test__helpers import AsyncIter, AsyncMock
class TestAsyncDocumentReference(aiounittest.AsyncTestCase):
@@ -386,33 +386,44 @@ async def _get_helper(
field_paths=None,
use_transaction=False,
not_found=False,
+ # This should be an impossible case, but we test against it for
+ # completeness
+ return_empty=False,
retry=None,
timeout=None,
):
- from google.api_core.exceptions import NotFound
from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1.types import common
from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import firestore
from google.cloud.firestore_v1.transaction import Transaction
# Create a minimal fake GAPIC with a dummy response.
create_time = 123
update_time = 234
- firestore_api = AsyncMock(spec=["get_document"])
- response = mock.create_autospec(document.Document)
- response.fields = {}
- response.create_time = create_time
- response.update_time = update_time
-
- if not_found:
- firestore_api.get_document.side_effect = NotFound("testing")
- else:
- firestore_api.get_document.return_value = response
+ read_time = 345
+ firestore_api = AsyncMock(spec=["batch_get_documents"])
+ response = mock.create_autospec(firestore.BatchGetDocumentsResponse)
+        response.read_time = read_time
+ response.found = mock.create_autospec(document.Document)
+ response.found.fields = {}
+ response.found.create_time = create_time
+ response.found.update_time = update_time
client = _make_client("donut-base")
client._firestore_api_internal = firestore_api
+ document_reference = self._make_one("where", "we-are", client=client)
+ response.found.name = None if not_found else document_reference._document_path
+ response.missing = document_reference._document_path if not_found else None
- document = self._make_one("where", "we-are", client=client)
+ def WhichOneof(val):
+ return "missing" if not_found else "found"
+
+ response._pb = response
+ response._pb.WhichOneof = WhichOneof
+ firestore_api.batch_get_documents.return_value = AsyncIter(
+ [response] if not return_empty else []
+ )
if use_transaction:
transaction = Transaction(client)
@@ -422,21 +433,21 @@ async def _get_helper(
kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
- snapshot = await document.get(
+ snapshot = await document_reference.get(
field_paths=field_paths, transaction=transaction, **kwargs,
)
- self.assertIs(snapshot.reference, document)
- if not_found:
+ self.assertIs(snapshot.reference, document_reference)
+ if not_found or return_empty:
self.assertIsNone(snapshot._data)
self.assertFalse(snapshot.exists)
- self.assertIsNone(snapshot.read_time)
+ self.assertIsNotNone(snapshot.read_time)
self.assertIsNone(snapshot.create_time)
self.assertIsNone(snapshot.update_time)
else:
self.assertEqual(snapshot.to_dict(), {})
self.assertTrue(snapshot.exists)
- self.assertIsNone(snapshot.read_time)
+ self.assertIs(snapshot.read_time, read_time)
self.assertIs(snapshot.create_time, create_time)
self.assertIs(snapshot.update_time, update_time)
@@ -451,9 +462,10 @@ async def _get_helper(
else:
expected_transaction_id = None
- firestore_api.get_document.assert_called_once_with(
+ firestore_api.batch_get_documents.assert_called_once_with(
request={
- "name": document._document_path,
+ "database": client._database_string,
+ "documents": [document_reference._document_path],
"mask": mask,
"transaction": expected_transaction_id,
},
@@ -469,6 +481,10 @@ async def test_get_not_found(self):
async def test_get_default(self):
await self._get_helper()
+ @pytest.mark.asyncio
+ async def test_get_return_empty(self):
+ await self._get_helper(return_empty=True)
+
@pytest.mark.asyncio
async def test_get_w_retry_timeout(self):
from google.api_core.retry import Retry
diff --git a/tests/unit/v1/test_async_query.py b/tests/unit/v1/test_async_query.py
index 42514c798e..64feddaf4e 100644
--- a/tests/unit/v1/test_async_query.py
+++ b/tests/unit/v1/test_async_query.py
@@ -17,7 +17,7 @@
import aiounittest
import mock
-from tests.unit.v1.test__helpers import AsyncMock, AsyncIter
+from tests.unit.v1.test__helpers import AsyncIter, AsyncMock
from tests.unit.v1.test_base_query import (
_make_credentials,
_make_query_response,
diff --git a/tests/unit/v1/test_base_client.py b/tests/unit/v1/test_base_client.py
index 163ea33e7c..fd176d7603 100644
--- a/tests/unit/v1/test_base_client.py
+++ b/tests/unit/v1/test_base_client.py
@@ -14,6 +14,7 @@
import datetime
import unittest
+import grpc
import mock
@@ -67,10 +68,11 @@ def test__firestore_api_property(self, mock_channel, mock_client):
return_value=mock.sentinel.firestore_api,
)
@mock.patch(
- "grpc.insecure_channel", autospec=True,
+ "google.cloud.firestore_v1.base_client.BaseClient._emulator_channel",
+ autospec=True,
)
def test__firestore_api_property_with_emulator(
- self, mock_insecure_channel, mock_client
+ self, mock_emulator_channel, mock_client
):
emulator_host = "localhost:8081"
with mock.patch("os.getenv") as getenv:
@@ -82,7 +84,7 @@ def test__firestore_api_property_with_emulator(
self.assertIs(firestore_api, mock_client.return_value)
self.assertIs(firestore_api, client._firestore_api_internal)
- mock_insecure_channel.assert_called_once_with(emulator_host)
+ mock_emulator_channel.assert_called_once()
# Call again to show that it is cached, but call count is still 1.
self.assertIs(client._firestore_api, mock_client.return_value)
@@ -135,6 +137,45 @@ def test__rpc_metadata_property_with_emulator(self):
],
)
+ def test_emulator_channel(self):
+ from google.cloud.firestore_v1.services.firestore.transports.grpc import (
+ FirestoreGrpcTransport,
+ )
+ from google.cloud.firestore_v1.services.firestore.transports.grpc_asyncio import (
+ FirestoreGrpcAsyncIOTransport,
+ )
+
+ emulator_host = "localhost:8081"
+ with mock.patch("os.getenv") as getenv:
+ getenv.return_value = emulator_host
+
+ credentials = _make_credentials()
+ database = "quanta"
+ client = self._make_one(
+ project=self.PROJECT, credentials=credentials, database=database
+ )
+
+ # checks that a channel is created
+ channel = client._emulator_channel(FirestoreGrpcTransport)
+ self.assertTrue(isinstance(channel, grpc.Channel))
+ channel = client._emulator_channel(FirestoreGrpcAsyncIOTransport)
+ self.assertTrue(isinstance(channel, grpc.aio.Channel))
+ # checks that the credentials are composite ones using a local channel from grpc
+ composite_credentials = client._local_composite_credentials()
+ self.assertTrue(isinstance(composite_credentials, grpc.ChannelCredentials))
+ self.assertTrue(
+ isinstance(
+ composite_credentials._credentials._call_credentialses[0],
+ grpc._cython.cygrpc.MetadataPluginCallCredentials,
+ )
+ )
+ self.assertTrue(
+ isinstance(
+ composite_credentials._credentials._channel_credentials,
+ grpc._cython.cygrpc.LocalChannelCredentials,
+ )
+ )
+
def test_field_path(self):
klass = self._get_target_class()
self.assertEqual(klass.field_path("a", "b", "c"), "a.b.c")
diff --git a/tests/unit/v1/test_bundle.py b/tests/unit/v1/test_bundle.py
new file mode 100644
index 0000000000..4332a92fa1
--- /dev/null
+++ b/tests/unit/v1/test_bundle.py
@@ -0,0 +1,554 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2021 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import typing
+import unittest
+
+import mock
+from google.cloud.firestore_bundle import BundleElement, FirestoreBundle
+from google.cloud.firestore_v1 import _helpers
+from google.cloud.firestore_v1.async_collection import AsyncCollectionReference
+from google.cloud.firestore_v1.base_query import BaseQuery
+from google.cloud.firestore_v1.collection import CollectionReference
+from google.cloud.firestore_v1.query import Query
+from google.cloud.firestore_v1.services.firestore.client import FirestoreClient
+from google.cloud.firestore_v1.types.document import Document
+from google.cloud.firestore_v1.types.firestore import RunQueryResponse
+from google.protobuf.timestamp_pb2 import Timestamp # type: ignore
+from tests.unit.v1 import _test_helpers
+from tests.unit.v1 import test__helpers
+
+
+class _CollectionQueryMixin:
+
+ # Path to each document where we don't specify custom collection names or
+ # document Ids
+ doc_key: str = "projects/project-project/databases/(default)/documents/col/doc"
+
+ @staticmethod
+ def build_results_iterable(items):
+ raise NotImplementedError()
+
+ @staticmethod
+ def get_collection_class():
+ raise NotImplementedError()
+
+ @staticmethod
+ def get_internal_client_mock():
+ raise NotImplementedError()
+
+ @staticmethod
+ def get_client():
+ raise NotImplementedError()
+
+ def _bundled_collection_helper(
+ self,
+ document_ids: typing.Optional[typing.List[str]] = None,
+ data: typing.Optional[typing.List[typing.Dict]] = None,
+ ) -> CollectionReference:
+ """Builder of a mocked Query for the sake of testing Bundles.
+
+ Bundling queries involves loading the actual documents for cold storage,
+ and this method arranges all of the necessary mocks so that unit tests
+ can think they are evaluating a live query.
+ """
+ client = self.get_client()
+ template = client._database_string + "/documents/col/{}"
+ document_ids = document_ids or ["doc-1", "doc-2"]
+
+ def _index_from_data(index: int):
+ if data is None or len(data) < index + 1:
+ return None
+ return data[index]
+
+ documents = [
+ RunQueryResponse(
+ transaction=b"",
+ document=Document(
+ name=template.format(document_id),
+ fields=_helpers.encode_dict(
+ _index_from_data(index) or {"hello": "world"}
+ ),
+ create_time=Timestamp(seconds=1, nanos=1),
+ update_time=Timestamp(seconds=1, nanos=1),
+ ),
+ read_time=_test_helpers.build_timestamp(),
+ )
+ for index, document_id in enumerate(document_ids)
+ ]
+ iterator = self.build_results_iterable(documents)
+ api_client = self.get_internal_client_mock()
+ api_client.run_query.return_value = iterator
+ client._firestore_api_internal = api_client
+ return self.get_collection_class()("col", client=client)
+
+ def _bundled_query_helper(
+ self,
+ document_ids: typing.Optional[typing.List[str]] = None,
+ data: typing.Optional[typing.List[typing.Dict]] = None,
+ ) -> BaseQuery:
+ return self._bundled_collection_helper(
+ document_ids=document_ids, data=data,
+ )._query()
+
+
+class TestBundle(_CollectionQueryMixin, unittest.TestCase):
+ @staticmethod
+ def build_results_iterable(items):
+ return iter(items)
+
+ @staticmethod
+ def get_client():
+ return _test_helpers.make_client()
+
+ @staticmethod
+ def get_internal_client_mock():
+ return mock.create_autospec(FirestoreClient)
+
+ @classmethod
+ def get_collection_class(cls):
+ return CollectionReference
+
+ def test_add_document(self):
+ bundle = FirestoreBundle("test")
+ doc = _test_helpers.build_document_snapshot(client=_test_helpers.make_client())
+ bundle.add_document(doc)
+ self.assertEqual(bundle.documents[self.doc_key].snapshot, doc)
+
+ def test_add_newer_document(self):
+ bundle = FirestoreBundle("test")
+ old_doc = _test_helpers.build_document_snapshot(
+ data={"version": 1},
+ client=_test_helpers.make_client(),
+ read_time=Timestamp(seconds=1, nanos=1),
+ )
+ bundle.add_document(old_doc)
+ self.assertEqual(bundle.documents[self.doc_key].snapshot._data["version"], 1)
+
+ # Builds the same ID by default
+ new_doc = _test_helpers.build_document_snapshot(
+ data={"version": 2},
+ client=_test_helpers.make_client(),
+ read_time=Timestamp(seconds=1, nanos=2),
+ )
+ bundle.add_document(new_doc)
+ self.assertEqual(bundle.documents[self.doc_key].snapshot._data["version"], 2)
+
+ def test_add_older_document(self):
+ bundle = FirestoreBundle("test")
+ new_doc = _test_helpers.build_document_snapshot(
+ data={"version": 2},
+ client=_test_helpers.make_client(),
+ read_time=Timestamp(seconds=1, nanos=2),
+ )
+ bundle.add_document(new_doc)
+ self.assertEqual(bundle.documents[self.doc_key].snapshot._data["version"], 2)
+
+ # Builds the same ID by default
+ old_doc = _test_helpers.build_document_snapshot(
+ data={"version": 1},
+ client=_test_helpers.make_client(),
+ read_time=Timestamp(seconds=1, nanos=1),
+ )
+ bundle.add_document(old_doc)
+ self.assertEqual(bundle.documents[self.doc_key].snapshot._data["version"], 2)
+
+ def test_add_document_with_different_read_times(self):
+ bundle = FirestoreBundle("test")
+ doc = _test_helpers.build_document_snapshot(
+ client=_test_helpers.make_client(),
+ data={"version": 1},
+ read_time=_test_helpers.build_test_timestamp(second=1),
+ )
+ # Create another reference to the same document, but with new
+ # data and a more recent `read_time`
+ doc_refreshed = _test_helpers.build_document_snapshot(
+ client=_test_helpers.make_client(),
+ data={"version": 2},
+ read_time=_test_helpers.build_test_timestamp(second=2),
+ )
+
+ bundle.add_document(doc)
+ self.assertEqual(
+ bundle.documents[self.doc_key].snapshot._data, {"version": 1},
+ )
+ bundle.add_document(doc_refreshed)
+ self.assertEqual(
+ bundle.documents[self.doc_key].snapshot._data, {"version": 2},
+ )
+
+ def test_add_query(self):
+ query = self._bundled_query_helper()
+ bundle = FirestoreBundle("test")
+ bundle.add_named_query("asdf", query)
+ self.assertIsNotNone(bundle.named_queries.get("asdf"))
+ self.assertIsNotNone(
+ bundle.documents[
+ "projects/project-project/databases/(default)/documents/col/doc-1"
+ ]
+ )
+ self.assertIsNotNone(
+ bundle.documents[
+ "projects/project-project/databases/(default)/documents/col/doc-2"
+ ]
+ )
+
+ def test_add_query_twice(self):
+ query = self._bundled_query_helper()
+ bundle = FirestoreBundle("test")
+ bundle.add_named_query("asdf", query)
+ self.assertRaises(ValueError, bundle.add_named_query, "asdf", query)
+
+ def test_adding_collection_raises_error(self):
+ col = self._bundled_collection_helper()
+ bundle = FirestoreBundle("test")
+ self.assertRaises(ValueError, bundle.add_named_query, "asdf", col)
+
+ def test_bundle_build(self):
+ bundle = FirestoreBundle("test")
+ bundle.add_named_query("best name", self._bundled_query_helper())
+ self.assertIsInstance(bundle.build(), str)
+
+ def test_get_documents(self):
+ bundle = FirestoreBundle("test")
+ query: Query = self._bundled_query_helper() # type: ignore
+ bundle.add_named_query("sweet query", query)
+ docs_iter = _helpers._get_documents_from_bundle(
+ bundle, query_name="sweet query"
+ )
+ doc = next(docs_iter)
+ self.assertEqual(doc.id, "doc-1")
+ doc = next(docs_iter)
+ self.assertEqual(doc.id, "doc-2")
+
+ # Now an empty one
+ docs_iter = _helpers._get_documents_from_bundle(
+ bundle, query_name="wrong query"
+ )
+ doc = next(docs_iter, None)
+ self.assertIsNone(doc)
+
+ def test_get_documents_two_queries(self):
+ bundle = FirestoreBundle("test")
+ query: Query = self._bundled_query_helper() # type: ignore
+ bundle.add_named_query("sweet query", query)
+
+ query: Query = self._bundled_query_helper(document_ids=["doc-3", "doc-4"]) # type: ignore
+ bundle.add_named_query("second query", query)
+
+ docs_iter = _helpers._get_documents_from_bundle(
+ bundle, query_name="sweet query"
+ )
+ doc = next(docs_iter)
+ self.assertEqual(doc.id, "doc-1")
+ doc = next(docs_iter)
+ self.assertEqual(doc.id, "doc-2")
+
+ docs_iter = _helpers._get_documents_from_bundle(
+ bundle, query_name="second query"
+ )
+ doc = next(docs_iter)
+ self.assertEqual(doc.id, "doc-3")
+ doc = next(docs_iter)
+ self.assertEqual(doc.id, "doc-4")
+
+ def test_get_document(self):
+ bundle = FirestoreBundle("test")
+ query: Query = self._bundled_query_helper() # type: ignore
+ bundle.add_named_query("sweet query", query)
+
+ self.assertIsNotNone(
+ _helpers._get_document_from_bundle(
+ bundle,
+ document_id="projects/project-project/databases/(default)/documents/col/doc-1",
+ ),
+ )
+
+ self.assertIsNone(
+ _helpers._get_document_from_bundle(
+ bundle,
+ document_id="projects/project-project/databases/(default)/documents/col/doc-0",
+ ),
+ )
+
+
+class TestAsyncBundle(_CollectionQueryMixin, unittest.TestCase):
+ @staticmethod
+ def get_client():
+ return _test_helpers.make_async_client()
+
+ @staticmethod
+ def build_results_iterable(items):
+ return test__helpers.AsyncIter(items)
+
+ @staticmethod
+ def get_internal_client_mock():
+ return test__helpers.AsyncMock(spec=["run_query"])
+
+ @classmethod
+ def get_collection_class(cls):
+ return AsyncCollectionReference
+
+ def test_async_query(self):
+ # Create an async query, but this test does not need to be
+ # marked as async by pytest because `bundle.add_named_query()`
+        # seamlessly handles accepting async iterables.
+ async_query = self._bundled_query_helper()
+ bundle = FirestoreBundle("test")
+ bundle.add_named_query("asdf", async_query)
+ self.assertIsNotNone(bundle.named_queries.get("asdf"))
+ self.assertIsNotNone(
+ bundle.documents[
+ "projects/project-project/databases/(default)/documents/col/doc-1"
+ ]
+ )
+ self.assertIsNotNone(
+ bundle.documents[
+ "projects/project-project/databases/(default)/documents/col/doc-2"
+ ]
+ )
+
+
+class TestBundleBuilder(_CollectionQueryMixin, unittest.TestCase):
+ @staticmethod
+ def build_results_iterable(items):
+ return iter(items)
+
+ @staticmethod
+ def get_client():
+ return _test_helpers.make_client()
+
+ @staticmethod
+ def get_internal_client_mock():
+ return mock.create_autospec(FirestoreClient)
+
+ @classmethod
+ def get_collection_class(cls):
+ return CollectionReference
+
+ def test_build_round_trip(self):
+ query = self._bundled_query_helper()
+ bundle = FirestoreBundle("test")
+ bundle.add_named_query("asdf", query)
+ serialized = bundle.build()
+ self.assertEqual(
+ serialized, _helpers.deserialize_bundle(serialized, query._client).build(),
+ )
+
+ def test_build_round_trip_emojis(self):
+ smile = "😂"
+ mermaid = "🧜🏿♀️"
+ query = self._bundled_query_helper(
+ data=[{"smile": smile}, {"compound": mermaid}],
+ )
+ bundle = FirestoreBundle("test")
+ bundle.add_named_query("asdf", query)
+ serialized = bundle.build()
+ reserialized_bundle = _helpers.deserialize_bundle(serialized, query._client)
+
+ self.assertEqual(
+ bundle.documents[
+ "projects/project-project/databases/(default)/documents/col/doc-1"
+ ].snapshot._data["smile"],
+ smile,
+ )
+ self.assertEqual(
+ bundle.documents[
+ "projects/project-project/databases/(default)/documents/col/doc-2"
+ ].snapshot._data["compound"],
+ mermaid,
+ )
+ self.assertEqual(
+ serialized, reserialized_bundle.build(),
+ )
+
+ def test_build_round_trip_more_unicode(self):
+ bano = "baño"
+ chinese_characters = "殷周金文集成引得"
+ query = self._bundled_query_helper(
+ data=[{"bano": bano}, {"international": chinese_characters}],
+ )
+ bundle = FirestoreBundle("test")
+ bundle.add_named_query("asdf", query)
+ serialized = bundle.build()
+ reserialized_bundle = _helpers.deserialize_bundle(serialized, query._client)
+
+ self.assertEqual(
+ bundle.documents[
+ "projects/project-project/databases/(default)/documents/col/doc-1"
+ ].snapshot._data["bano"],
+ bano,
+ )
+ self.assertEqual(
+ bundle.documents[
+ "projects/project-project/databases/(default)/documents/col/doc-2"
+ ].snapshot._data["international"],
+ chinese_characters,
+ )
+ self.assertEqual(
+ serialized, reserialized_bundle.build(),
+ )
+
+ def test_roundtrip_binary_data(self):
+ query = self._bundled_query_helper(data=[{"binary_data": b"\x0f"}],)
+ bundle = FirestoreBundle("test")
+ bundle.add_named_query("asdf", query)
+ serialized = bundle.build()
+ reserialized_bundle = _helpers.deserialize_bundle(serialized, query._client)
+ gen = _helpers._get_documents_from_bundle(reserialized_bundle)
+ snapshot = next(gen)
+ self.assertEqual(
+ int.from_bytes(snapshot._data["binary_data"], byteorder=sys.byteorder), 15,
+ )
+
+ def test_deserialize_from_seconds_nanos(self):
+ """Some SDKs (Node) serialize Timestamp values to
+ '{"seconds": 123, "nanos": 456}', instead of an ISO-formatted string.
+ This tests deserialization from that format."""
+
+ client = _test_helpers.make_client(project_name="fir-bundles-test")
+
+ _serialized: str = (
+ '139{"metadata":{"id":"test-bundle","createTime":'
+ + '{"seconds":"1616434660","nanos":913764000},"version":1,"totalDocuments"'
+ + ':1,"totalBytes":"829"}}224{"namedQuery":{"name":"self","bundledQuery":'
+ + '{"parent":"projects/fir-bundles-test/databases/(default)/documents",'
+ + '"structuredQuery":{"from":[{"collectionId":"bundles"}]}},"readTime":'
+ + '{"seconds":"1616434660","nanos":913764000}}}194{"documentMetadata":'
+ + '{"name":"projects/fir-bundles-test/databases/(default)/documents/'
+ + 'bundles/test-bundle","readTime":{"seconds":"1616434660","nanos":'
+ + '913764000},"exists":true,"queries":["self"]}}402{"document":{"name":'
+ + '"projects/fir-bundles-test/databases/(default)/documents/bundles/'
+ + 'test-bundle","fields":{"clientCache":{"stringValue":"1200"},'
+ + '"serverCache":{"stringValue":"600"},"queries":{"mapValue":{"fields":'
+ + '{"self":{"mapValue":{"fields":{"collection":{"stringValue":"bundles"'
+ + '}}}}}}}},"createTime":{"seconds":"1615488796","nanos":163327000},'
+ + '"updateTime":{"seconds":"1615492486","nanos":34157000}}}'
+ )
+
+ self.assertRaises(
+ ValueError, _helpers.deserialize_bundle, _serialized, client=client,
+ )
+
+        # The following assertions would test deserialization of NodeJS bundles
+        # if explicit handling of that edge case were ever added.
+
+ # First, deserialize that value into a Bundle instance. If this succeeds,
+ # we're off to a good start.
+ # bundle = _helpers.deserialize_bundle(_serialized, client=client)
+ # Second, re-serialize it into a Python-centric format (aka, ISO timestamps)
+ # instead of seconds/nanos.
+ # re_serialized = bundle.build()
+ # # Finally, confirm the round trip.
+ # self.assertEqual(
+ # re_serialized,
+ # _helpers.deserialize_bundle(re_serialized, client=client).build(),
+ # )
+
+ def test_deserialized_bundle_cached_metadata(self):
+ query = self._bundled_query_helper()
+ bundle = FirestoreBundle("test")
+ bundle.add_named_query("asdf", query)
+ bundle_copy = _helpers.deserialize_bundle(bundle.build(), query._client)
+ self.assertIsInstance(bundle_copy, FirestoreBundle)
+ self.assertIsNotNone(bundle_copy._deserialized_metadata)
+ bundle_copy.add_named_query("second query", query)
+ self.assertIsNone(bundle_copy._deserialized_metadata)
+
+ @mock.patch("google.cloud.firestore_v1._helpers._parse_bundle_elements_data")
+ def test_invalid_json(self, fnc):
+ client = _test_helpers.make_client()
+ fnc.return_value = iter([{}])
+ self.assertRaises(
+ ValueError, _helpers.deserialize_bundle, "does not matter", client,
+ )
+
+ @mock.patch("google.cloud.firestore_v1._helpers._parse_bundle_elements_data")
+ def test_not_metadata_first(self, fnc):
+ client = _test_helpers.make_client()
+ fnc.return_value = iter([{"document": {}}])
+ self.assertRaises(
+ ValueError, _helpers.deserialize_bundle, "does not matter", client,
+ )
+
+ @mock.patch("google.cloud.firestore_bundle.FirestoreBundle._add_bundle_element")
+ @mock.patch("google.cloud.firestore_v1._helpers._parse_bundle_elements_data")
+ def test_unexpected_termination(self, fnc, _):
+ client = _test_helpers.make_client()
+        # invalid because `document_metadata` must be followed by a `document`
+ fnc.return_value = [{"metadata": {"id": "asdf"}}, {"documentMetadata": {}}]
+ self.assertRaises(
+ ValueError, _helpers.deserialize_bundle, "does not matter", client,
+ )
+
+ @mock.patch("google.cloud.firestore_bundle.FirestoreBundle._add_bundle_element")
+ @mock.patch("google.cloud.firestore_v1._helpers._parse_bundle_elements_data")
+ def test_valid_passes(self, fnc, _):
+ client = _test_helpers.make_client()
+ fnc.return_value = [
+ {"metadata": {"id": "asdf"}},
+ {"documentMetadata": {}},
+ {"document": {}},
+ ]
+ _helpers.deserialize_bundle("does not matter", client)
+
+ @mock.patch("google.cloud.firestore_bundle.FirestoreBundle._add_bundle_element")
+ @mock.patch("google.cloud.firestore_v1._helpers._parse_bundle_elements_data")
+ def test_invalid_bundle(self, fnc, _):
+ client = _test_helpers.make_client()
+        # invalid because `document` must follow `document_metadata`
+ fnc.return_value = [{"metadata": {"id": "asdf"}}, {"document": {}}]
+ self.assertRaises(
+ ValueError, _helpers.deserialize_bundle, "does not matter", client,
+ )
+
+ @mock.patch("google.cloud.firestore_bundle.FirestoreBundle._add_bundle_element")
+ @mock.patch("google.cloud.firestore_v1._helpers._parse_bundle_elements_data")
+ def test_invalid_bundle_element_type(self, fnc, _):
+ client = _test_helpers.make_client()
+        # invalid because `wtfisthis?` is obviously not a valid element type
+ fnc.return_value = [{"metadata": {"id": "asdf"}}, {"wtfisthis?": {}}]
+ self.assertRaises(
+ ValueError, _helpers.deserialize_bundle, "does not matter", client,
+ )
+
+ @mock.patch("google.cloud.firestore_bundle.FirestoreBundle._add_bundle_element")
+ @mock.patch("google.cloud.firestore_v1._helpers._parse_bundle_elements_data")
+ def test_invalid_bundle_start(self, fnc, _):
+ client = _test_helpers.make_client()
+        # invalid because the first element must be of key `metadata`
+ fnc.return_value = [{"document": {}}]
+ self.assertRaises(
+ ValueError, _helpers.deserialize_bundle, "does not matter", client,
+ )
+
+ def test_not_actually_a_bundle_at_all(self):
+ client = _test_helpers.make_client()
+ self.assertRaises(
+ ValueError, _helpers.deserialize_bundle, "{}", client,
+ )
+
+ def test_add_invalid_bundle_element_type(self):
+ client = _test_helpers.make_client()
+ bundle = FirestoreBundle("asdf")
+ self.assertRaises(
+ ValueError,
+ bundle._add_bundle_element,
+ BundleElement(),
+ client=client,
+ type="asdf",
+ )
diff --git a/tests/unit/v1/test_collection.py b/tests/unit/v1/test_collection.py
index b75dfdfa2b..feaec81194 100644
--- a/tests/unit/v1/test_collection.py
+++ b/tests/unit/v1/test_collection.py
@@ -17,6 +17,8 @@
import mock
+from tests.unit.v1 import _test_helpers
+
class TestCollectionReference(unittest.TestCase):
@staticmethod
@@ -89,7 +91,7 @@ def test_add_auto_assigned(self):
firestore_api.commit.return_value = commit_response
create_doc_response = document.Document()
firestore_api.create_document.return_value = create_doc_response
- client = _make_client()
+ client = _test_helpers.make_client()
client._firestore_api_internal = firestore_api
# Actually make a collection.
@@ -140,7 +142,7 @@ def _write_pb_for_create(document_path, document_data):
def _add_helper(self, retry=None, timeout=None):
from google.cloud.firestore_v1.document import DocumentReference
- from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1 import _helpers as _fs_v1_helpers
# Create a minimal fake GAPIC with a dummy response.
firestore_api = mock.Mock(spec=["commit"])
@@ -155,7 +157,7 @@ def _add_helper(self, retry=None, timeout=None):
firestore_api.commit.return_value = commit_response
# Attach the fake GAPIC to a real client.
- client = _make_client()
+ client = _test_helpers.make_client()
client._firestore_api_internal = firestore_api
# Actually make a collection and call add().
@@ -163,7 +165,7 @@ def _add_helper(self, retry=None, timeout=None):
document_data = {"zorp": 208.75, "i-did-not": b"know that"}
doc_id = "child"
- kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+ kwargs = _fs_v1_helpers.make_retry_timeout_kwargs(retry, timeout)
update_time, document_ref = collection.add(
document_data, document_id=doc_id, **kwargs
)
@@ -196,7 +198,7 @@ def test_add_w_retry_timeout(self):
self._add_helper(retry=retry, timeout=timeout)
def _list_documents_helper(self, page_size=None, retry=None, timeout=None):
- from google.cloud.firestore_v1 import _helpers
+ from google.cloud.firestore_v1 import _helpers as _fs_v1_helpers
from google.api_core.page_iterator import Iterator
from google.api_core.page_iterator import Page
from google.cloud.firestore_v1.document import DocumentReference
@@ -213,7 +215,7 @@ def _next_page(self):
page, self._pages = self._pages[0], self._pages[1:]
return Page(self, page, self.item_to_value)
- client = _make_client()
+ client = _test_helpers.make_client()
template = client._database_string + "/documents/{}"
document_ids = ["doc-1", "doc-2"]
documents = [
@@ -224,7 +226,7 @@ def _next_page(self):
api_client.list_documents.return_value = iterator
client._firestore_api_internal = api_client
collection = self._make_one("collection", client=client)
- kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+ kwargs = _fs_v1_helpers.make_retry_timeout_kwargs(retry, timeout)
if page_size is not None:
documents = list(collection.list_documents(page_size=page_size, **kwargs))
@@ -245,6 +247,7 @@ def _next_page(self):
"collection_id": collection.id,
"page_size": page_size,
"show_missing": True,
+ "mask": {"field_paths": None},
},
metadata=client._rpc_metadata,
**kwargs,
@@ -346,16 +349,3 @@ def test_on_snapshot(self, watch):
collection = self._make_one("collection")
collection.on_snapshot(None)
watch.for_query.assert_called_once()
-
-
-def _make_credentials():
- import google.auth.credentials
-
- return mock.Mock(spec=google.auth.credentials.Credentials)
-
-
-def _make_client():
- from google.cloud.firestore_v1.client import Client
-
- credentials = _make_credentials()
- return Client(project="project-project", credentials=credentials)
diff --git a/tests/unit/v1/test_cross_language.py b/tests/unit/v1/test_cross_language.py
index 49bc11506e..6d57c110ab 100644
--- a/tests/unit/v1/test_cross_language.py
+++ b/tests/unit/v1/test_cross_language.py
@@ -23,6 +23,7 @@
from google.cloud.firestore_v1.types import document
from google.cloud.firestore_v1.types import firestore
from google.cloud.firestore_v1.types import write
+from google.protobuf.timestamp_pb2 import Timestamp
from tests.unit.v1 import conformance_tests
@@ -134,20 +135,28 @@ def test_create_testprotos(test_proto):
@pytest.mark.parametrize("test_proto", _GET_TESTPROTOS)
def test_get_testprotos(test_proto):
testcase = test_proto.get
- firestore_api = mock.Mock(spec=["get_document"])
- response = document.Document()
- firestore_api.get_document.return_value = response
+ firestore_api = mock.Mock(spec=["batch_get_documents", "_client"])
+ response = firestore.BatchGetDocumentsResponse()
+ response.read_time = Timestamp(seconds=0, nanos=0)
+ response.found = document.Document()
+ response.found.fields = {}
+ response.found.create_time = Timestamp(seconds=0, nanos=0)
+ response.found.update_time = Timestamp(seconds=0, nanos=0)
+ firestore_api.batch_get_documents.return_value = iter([response])
+ firestore_api._client._database_string = "projects/projectID/databases/(default)"
client, doc = _make_client_document(firestore_api, testcase)
+ response.found.name = doc._document_path
doc.get() # No '.textprotos' for errors, field_paths.
expected_request = {
- "name": doc._document_path,
+ "database": firestore_api._client._database_string,
+ "documents": [doc._document_path],
"mask": None,
"transaction": None,
}
- firestore_api.get_document.assert_called_once_with(
+ firestore_api.batch_get_documents.assert_called_once_with(
request=expected_request, metadata=client._rpc_metadata,
)
diff --git a/tests/unit/v1/test_document.py b/tests/unit/v1/test_document.py
index 6ca9b3096b..30c8a1c16c 100644
--- a/tests/unit/v1/test_document.py
+++ b/tests/unit/v1/test_document.py
@@ -366,33 +366,45 @@ def _get_helper(
field_paths=None,
use_transaction=False,
not_found=False,
+ # This should be an impossible case, but we test against it for
+ # completeness
+ return_empty=False,
retry=None,
timeout=None,
):
- from google.api_core.exceptions import NotFound
from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1.types import common
from google.cloud.firestore_v1.types import document
+ from google.cloud.firestore_v1.types import firestore
from google.cloud.firestore_v1.transaction import Transaction
# Create a minimal fake GAPIC with a dummy response.
create_time = 123
update_time = 234
- firestore_api = mock.Mock(spec=["get_document"])
- response = mock.create_autospec(document.Document)
- response.fields = {}
- response.create_time = create_time
- response.update_time = update_time
-
- if not_found:
- firestore_api.get_document.side_effect = NotFound("testing")
- else:
- firestore_api.get_document.return_value = response
+ read_time = 345
+ firestore_api = mock.Mock(spec=["batch_get_documents"])
+ response = mock.create_autospec(firestore.BatchGetDocumentsResponse)
+ response.read_time = read_time
+ response.found = mock.create_autospec(document.Document)
+ response.found.fields = {}
+ response.found.create_time = create_time
+ response.found.update_time = update_time
client = _make_client("donut-base")
client._firestore_api_internal = firestore_api
+ document_reference = self._make_one("where", "we-are", client=client)
- document = self._make_one("where", "we-are", client=client)
+ response.found.name = None if not_found else document_reference._document_path
+ response.missing = document_reference._document_path if not_found else None
+
+ def WhichOneof(val):
+ return "missing" if not_found else "found"
+
+ response._pb = response
+ response._pb.WhichOneof = WhichOneof
+ firestore_api.batch_get_documents.return_value = iter(
+ [response] if not return_empty else []
+ )
if use_transaction:
transaction = Transaction(client)
@@ -402,21 +414,21 @@ def _get_helper(
kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
- snapshot = document.get(
+ snapshot = document_reference.get(
field_paths=field_paths, transaction=transaction, **kwargs
)
- self.assertIs(snapshot.reference, document)
- if not_found:
+ self.assertIs(snapshot.reference, document_reference)
+ if not_found or return_empty:
self.assertIsNone(snapshot._data)
self.assertFalse(snapshot.exists)
- self.assertIsNone(snapshot.read_time)
+ self.assertIsNotNone(snapshot.read_time)
self.assertIsNone(snapshot.create_time)
self.assertIsNone(snapshot.update_time)
else:
self.assertEqual(snapshot.to_dict(), {})
self.assertTrue(snapshot.exists)
- self.assertIsNone(snapshot.read_time)
+ self.assertIs(snapshot.read_time, read_time)
self.assertIs(snapshot.create_time, create_time)
self.assertIs(snapshot.update_time, update_time)
@@ -431,9 +443,10 @@ def _get_helper(
else:
expected_transaction_id = None
- firestore_api.get_document.assert_called_once_with(
+ firestore_api.batch_get_documents.assert_called_once_with(
request={
- "name": document._document_path,
+ "database": client._database_string,
+ "documents": [document_reference._document_path],
"mask": mask,
"transaction": expected_transaction_id,
},
@@ -447,6 +460,9 @@ def test_get_not_found(self):
def test_get_default(self):
self._get_helper()
+ def test_get_return_empty(self):
+ self._get_helper(return_empty=True)
+
def test_get_w_retry_timeout(self):
from google.api_core.retry import Retry
diff --git a/tests/unit/v1/testdata/query-invalid-operator.json b/tests/unit/v1/testdata/query-invalid-operator.json
index c53e5c2bdf..0acfeae67e 100644
--- a/tests/unit/v1/testdata/query-invalid-operator.json
+++ b/tests/unit/v1/testdata/query-invalid-operator.json
@@ -2,7 +2,7 @@
"tests": [
{
"description": "query: invalid operator in Where clause",
- "comment": "The |~| operator is not supported.",
+ "comment": "The |~| operator is not supported.",
"query": {
"collPath": "projects/projectID/databases/(default)/documents/C",
"clauses": [
diff --git a/tests/unit/v1/testdata/set-arrayunion-merge.json b/tests/unit/v1/testdata/set-arrayunion-merge.json
new file mode 100644
index 0000000000..46c2fbfb32
--- /dev/null
+++ b/tests/unit/v1/testdata/set-arrayunion-merge.json
@@ -0,0 +1,48 @@
+{
+ "tests": [
+ {
+ "description": "set: merge ArrayUnion field",
+ "comment": "An ArrayUnion value can occur at any depth. In this case,\nthe transform applies to the field path \"b.c\". \"a\" is left alone and remains in the object.",
+ "set": {
+ "option": {
+ "all": true
+ },
+ "docRefPath": "projects/projectID/databases/(default)/documents/C/d",
+ "jsonData": "{\"a\": 1, \"b\": {\"c\": [\"ArrayUnion\", \"foo\", \"bar\"]}}",
+ "request": {
+ "database": "projects/projectID/databases/(default)",
+ "writes": [
+ {
+ "update": {
+ "name": "projects/projectID/databases/(default)/documents/C/d",
+ "fields": {
+ "a": {
+ "integerValue": "1"
+ }
+ }
+ },
+ "updateMask": {
+ "fieldPaths": ["a"]
+ },
+ "updateTransforms": [
+ {
+ "fieldPath": "b.c",
+ "appendMissingElements": {
+ "values": [
+ {
+ "stringValue": "foo"
+ },
+ {
+ "stringValue": "bar"
+ }
+ ]
+ }
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+ ]
+}